repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
pulseenergy/airbrake4j | src/main/java/com/pulseenergy/oss/airbrake/AirbrakeUtil.java | 518 | package com.pulseenergy.oss.airbrake;
public class AirbrakeUtil {

    /** Default Airbrake v2 notices endpoint, used when no URI is configured. */
    public static final String DEFAULT_AIRBRAKE_URI = "http://api.airbrake.io/notifier_api/v2/notices";

    private AirbrakeUtil() {
        // Utility class; not instantiable.
    }

    /**
     * Returns the given URI when it contains non-whitespace characters,
     * otherwise falls back to {@link #DEFAULT_AIRBRAKE_URI}.
     *
     * @param uri a candidate Airbrake endpoint, possibly null or blank
     * @return the candidate URI, or the default endpoint if the candidate is blank
     */
    public static String getAirbrakeUriOrDefault(final String uri) {
        return isNotBlank(uri) ? uri : DEFAULT_AIRBRAKE_URI;
    }

    /** True when the string is non-null and contains at least one non-whitespace character. */
    private static boolean isNotBlank(final String candidate) {
        return candidate != null && candidate.trim().length() > 0;
    }
}
| apache-2.0 |
remkop/picocli | picocli-examples/src/main/java/picocli/examples/git/GitCommit.java | 3235 | package picocli.examples.git;
import picocli.CommandLine;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
/**
 * Example {@code git commit} command demonstrating picocli's annotation-based
 * option declarations and customized usage-help sections.
 */
@CommandLine.Command(name = "commit",
        sortOptions = false,
        headerHeading = "@|bold,underline Usage:|@%n%n",
        synopsisHeading = "%n",
        descriptionHeading = "%n@|bold,underline Description:|@%n%n",
        parameterListHeading = "%n@|bold,underline Parameters:|@%n",
        optionListHeading = "%n@|bold,underline Options:|@%n",
        header = "Record changes to the repository.",
        description = "Stores the current contents of the index in a new commit " +
                "along with a log message from the user describing the changes.")
class GitCommit implements Callable<Integer> {

    @CommandLine.Option(names = {"-a", "--all"},
            description = "Tell the command to automatically stage files that have been modified " +
                    "and deleted, but new files you have not told Git about are not affected.")
    boolean all;

    // Fixed typo in user-visible help text: "chose" -> "choose".
    @CommandLine.Option(names = {"-p", "--patch"}, description = "Use the interactive patch selection interface to choose which changes to commit")
    boolean patch;

    @CommandLine.Option(names = {"-C", "--reuse-message"}, paramLabel = "<commit>",
            description = "Take an existing commit object, and reuse the log message and the " +
                    "authorship information (including the timestamp) when creating the commit.")
    String reuseMessageCommit;

    // Fixed missing space between the concatenated help-text fragments ("can" + "further").
    @CommandLine.Option(names = {"-c", "--reedit-message"}, paramLabel = "<commit>",
            description = "Like -C, but with -c the editor is invoked, so that the user can " +
                    "further edit the commit message.")
    String reEditMessageCommit;

    @CommandLine.Option(names = "--fixup", paramLabel = "<commit>",
            description = "Construct a commit message for use with rebase --autosquash.")
    String fixupCommit;

    // Fixed missing space between the concatenated help-text fragments ("commit" + "message").
    @CommandLine.Option(names = "--squash", paramLabel = "<commit>",
            description = "Construct a commit message for use with rebase --autosquash. The commit " +
                    "message subject line is taken from the specified commit with a prefix of " +
                    "\"squash! \". Can be used with additional commit message options (-m/-c/-C/-F).")
    String squashCommit;

    @CommandLine.Option(names = {"-F", "--file"}, paramLabel = "<file>",
            description = "Take the commit message from the given file. Use - to read the message from the standard input.")
    File file;

    @CommandLine.Option(names = {"-m", "--message"}, paramLabel = "<msg>",
            description = "Use the given <msg> as the commit message. If multiple -m options" +
                    " are given, their values are concatenated as separate paragraphs.")
    List<String> message = new ArrayList<String>();

    @CommandLine.Parameters(paramLabel = "<files>", description = "the files to commit")
    List<File> files = new ArrayList<File>();

    /**
     * Demo implementation: prints a confirmation and reports success.
     *
     * @return the process exit code (always 0 in this example)
     */
    @Override
    public Integer call() throws Exception {
        System.out.println("Your files have been committed.");
        return 0;
    }
}
| apache-2.0 |
opennetworkinglab/spring-open | src/main/java/net/onrc/onos/core/linkdiscovery/web/LinksResource.java | 1350 | package net.onrc.onos.core.linkdiscovery.web;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import net.onrc.onos.core.linkdiscovery.ILinkDiscoveryService;
import net.onrc.onos.core.linkdiscovery.Link;
import net.onrc.onos.core.linkdiscovery.LinkInfo;
import org.restlet.resource.Get;
import org.restlet.resource.ServerResource;
public class LinksResource extends ServerResource {

    /**
     * Returns every link currently known to the link discovery service,
     * serialized as JSON. Returns an empty set when the service is unavailable.
     */
    @Get("json")
    public Set<LinkWithType> retrieve() {
        ILinkDiscoveryService discoveryService = (ILinkDiscoveryService) getContext()
                .getAttributes().get(ILinkDiscoveryService.class.getCanonicalName());
        Set<LinkWithType> result = new HashSet<LinkWithType>();
        if (discoveryService == null) {
            return result;
        }
        // Snapshot the service's link map before iterating, as the original did.
        Map<Link, LinkInfo> snapshot = new HashMap<Link, LinkInfo>(discoveryService.getLinks());
        for (Entry<Link, LinkInfo> entry : snapshot.entrySet()) {
            LinkInfo info = entry.getValue();
            result.add(new LinkWithType(entry.getKey(),
                    info.getSrcPortStateInteger(),
                    info.getDstPortStateInteger(),
                    info.getLinkType()));
        }
        return result;
    }
}
| apache-2.0 |
ChDiego/myboss | src/main/java/com/myboss/cn/modules/sys/dao/MenuDao.java | 651 | /**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.myboss.cn.modules.sys.dao;
import java.util.List;
import com.myboss.cn.common.persistence.CrudDao;
import com.myboss.cn.common.persistence.annotation.MyBatisDao;
import com.myboss.cn.modules.sys.entity.Menu;
/**
 * Menu DAO interface (MyBatis mapper for system menu entities).
 * @author ThinkGem
 * @version 2014-05-16
 */
@MyBatisDao
public interface MenuDao extends CrudDao<Menu> {
    // Finds menus whose parent-ids path matches the given menu's, via a LIKE query
    // (presumably used to load a subtree -- TODO confirm against the mapper XML).
    public List<Menu> findByParentIdsLike(Menu menu);
    // Finds menus for a user; the user id is presumably carried on the query
    // object -- TODO confirm against the mapper XML.
    public List<Menu> findByUserId(Menu menu);
    // Updates the menu's parent-ids path; returns the affected row count.
    public int updateParentIds(Menu menu);
    // Updates the menu's sort order; returns the affected row count.
    public int updateSort(Menu menu);
}
| apache-2.0 |
kevin-faro/papaya | src/test/java/com/faro/papaya/LzyMapTest.java | 1136 | package com.faro.papaya;
import java.util.HashMap;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Assert;
import org.junit.Test;
import com.faro.papaya.util.LzyMap;
import com.faro.papaya.util.ValueFactory;
/**
 * Tests for {@code LzyMap}: values are lazily created via a factory on first
 * access, except that an explicitly stored null must not trigger the factory.
 */
public class LzyMapTest {

    /** Builds the factory used by both tests; always produces a fresh AtomicInteger(0). */
    private static ValueFactory<String, AtomicInteger> zeroFactory() {
        return new ValueFactory<String, AtomicInteger>() {
            @Override
            public AtomicInteger build(String key) {
                return new AtomicInteger(0);
            }
        };
    }

    /** A missing key is created by the factory on get(key, factory) and then persists. */
    @Test
    public void testLoad() {
        LzyMap<String, AtomicInteger> map = new LzyMap<>(new HashMap<String, AtomicInteger>());
        Assert.assertNull(map.get("abc"));
        Assert.assertEquals(0, map.get("abc", zeroFactory()).intValue());
        Assert.assertNotNull(map.get("abc"));
    }

    /** An explicitly stored null value must not be replaced by a factory-built one. */
    @Test
    public void testNullNotLoad() {
        LzyMap<String, AtomicInteger> map = new LzyMap<>(new HashMap<String, AtomicInteger>());
        map.put("abc", null);
        Assert.assertNull(map.get("abc", zeroFactory()));
    }
}
| apache-2.0 |
k9mail/k-9 | app/ui/legacy/src/main/java/com/fsck/k9/ui/messageview/MessageCryptoPresenter.java | 7778 | package com.fsck.k9.ui.messageview;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.IntentSender;
import android.content.pm.PackageManager.NameNotFoundException;
import android.graphics.drawable.Drawable;
import android.os.Parcelable;
import androidx.annotation.Nullable;
import android.text.TextUtils;
import com.fsck.k9.Account;
import com.fsck.k9.mailstore.CryptoResultAnnotation;
import com.fsck.k9.mailstore.MessageViewInfo;
import com.fsck.k9.view.MessageCryptoDisplayStatus;
import timber.log.Timber;
/**
 * Presenter for the message-view crypto UI. Maps a message's OpenPGP
 * signature/encryption result ({@link CryptoResultAnnotation}) to the view
 * state shown to the user, and dispatches clicks on the crypto status views.
 */
@SuppressWarnings("WeakerAccess")
public class MessageCryptoPresenter implements OnCryptoClickListener {
    public static final int REQUEST_CODE_UNKNOWN_KEY = 123;
    public static final int REQUEST_CODE_SECURITY_WARNING = 124;

    // injected state
    private final MessageCryptoMvpView messageCryptoMvpView;

    // transient state
    private CryptoResultAnnotation cryptoResultAnnotation;
    private boolean reloadOnResumeWithoutRecreateFlag;

    public MessageCryptoPresenter(MessageCryptoMvpView messageCryptoMvpView) {
        this.messageCryptoMvpView = messageCryptoMvpView;
    }

    /** Restarts crypto processing on resume if the provider config was changed while paused. */
    public void onResume() {
        if (reloadOnResumeWithoutRecreateFlag) {
            reloadOnResumeWithoutRecreateFlag = false;
            messageCryptoMvpView.restartMessageCryptoProcessing();
        }
    }

    /**
     * Shows the message according to its crypto display status.
     *
     * @return false when crypto display is DISABLED (the caller shows the message itself);
     *         true when this method displayed the message or an error/placeholder view
     * @throws IllegalStateException if called while the crypto status is still LOADING
     */
    public boolean maybeHandleShowMessage(MessageTopView messageView, Account account, MessageViewInfo messageViewInfo) {
        this.cryptoResultAnnotation = messageViewInfo.cryptoResultAnnotation;
        MessageCryptoDisplayStatus displayStatus =
                MessageCryptoDisplayStatus.fromResultAnnotation(messageViewInfo.cryptoResultAnnotation);
        if (displayStatus == MessageCryptoDisplayStatus.DISABLED) {
            return false;
        }
        messageView.getMessageHeaderView().setCryptoStatus(displayStatus);
        switch (displayStatus) {
            case CANCELLED: {
                Drawable providerIcon = getOpenPgpApiProviderIcon(messageView.getContext(), account.getOpenPgpProvider());
                messageView.showMessageCryptoCancelledView(messageViewInfo, providerIcon);
                break;
            }
            case INCOMPLETE_ENCRYPTED: {
                Drawable providerIcon = getOpenPgpApiProviderIcon(messageView.getContext(), account.getOpenPgpProvider());
                messageView.showMessageEncryptedButIncomplete(messageViewInfo, providerIcon);
                break;
            }
            case ENCRYPTED_ERROR:
            case UNSUPPORTED_ENCRYPTED: {
                Drawable providerIcon = getOpenPgpApiProviderIcon(messageView.getContext(), account.getOpenPgpProvider());
                messageView.showMessageCryptoErrorView(messageViewInfo, providerIcon);
                break;
            }
            case ENCRYPTED_NO_PROVIDER: {
                messageView.showCryptoProviderNotConfigured(messageViewInfo);
                break;
            }
            case INCOMPLETE_SIGNED:
            case UNSUPPORTED_SIGNED:
            default: {
                // Incompletely/unsupported-signed messages are shown like plain messages.
                messageView.showMessage(account, messageViewInfo);
                break;
            }
            case LOADING: {
                throw new IllegalStateException("Displaying message while in loading state!");
            }
        }
        return true;
    }

    @Override
    public void onCryptoClick() {
        if (cryptoResultAnnotation == null) {
            return;
        }
        MessageCryptoDisplayStatus displayStatus =
                MessageCryptoDisplayStatus.fromResultAnnotation(cryptoResultAnnotation);
        switch (displayStatus) {
            case LOADING:
                // no need to do anything, there is a progress bar...
                break;
            default:
                displayCryptoInfoDialog(displayStatus);
                break;
        }
    }

    @SuppressWarnings("UnusedParameters") // for consistency with Activity.onActivityResult
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == REQUEST_CODE_UNKNOWN_KEY) {
            if (resultCode != Activity.RESULT_OK) {
                return;
            }
            messageCryptoMvpView.restartMessageCryptoProcessing();
        } else if (requestCode == REQUEST_CODE_SECURITY_WARNING) {
            messageCryptoMvpView.redisplayMessage();
        } else {
            throw new IllegalStateException("got an activity result that wasn't meant for us. this is a bug!");
        }
    }

    private void displayCryptoInfoDialog(MessageCryptoDisplayStatus displayStatus) {
        messageCryptoMvpView.showCryptoInfoDialog(
                displayStatus, cryptoResultAnnotation.hasOpenPgpInsecureWarningPendingIntent());
    }

    /** Launches the key search UI; the result restarts crypto processing (see onActivityResult). */
    void onClickSearchKey() {
        launchPendingIntent(cryptoResultAnnotation.getOpenPgpSigningKeyIntentIfAny(), REQUEST_CODE_UNKNOWN_KEY);
    }

    /** Shows the signing key; fire-and-forget, so no request code is used. */
    public void onClickShowCryptoKey() {
        launchPendingIntent(cryptoResultAnnotation.getOpenPgpSigningKeyIntentIfAny(), null);
    }

    public void onClickRetryCryptoOperation() {
        messageCryptoMvpView.restartMessageCryptoProcessing();
    }

    /** Shows the insecure-key warning details; the result redisplays the message. */
    public void onClickShowCryptoWarningDetails() {
        launchPendingIntent(cryptoResultAnnotation.getOpenPgpInsecureWarningPendingIntent(), REQUEST_CODE_SECURITY_WARNING);
    }

    /**
     * Starts the given pending intent through the MVP view. Shared by the click
     * handlers above, which previously duplicated this try/catch verbatim.
     *
     * @param pendingIntent the intent to launch; a null intent is silently ignored
     * @param requestCode   activity request code, or null for fire-and-forget launches
     */
    private void launchPendingIntent(PendingIntent pendingIntent, Integer requestCode) {
        if (pendingIntent == null) {
            return;
        }
        try {
            messageCryptoMvpView.startPendingIntentForCryptoPresenter(
                    pendingIntent.getIntentSender(), requestCode, null, 0, 0, 0);
        } catch (IntentSender.SendIntentException e) {
            Timber.e(e, "SendIntentException");
        }
    }

    /** Returns the OpenPGP decryption result to attach to a reply, or null if not applicable. */
    public Parcelable getDecryptionResultForReply() {
        if (cryptoResultAnnotation != null && cryptoResultAnnotation.isOpenPgpResult()) {
            return cryptoResultAnnotation.getOpenPgpDecryptionResult();
        }
        return null;
    }

    /** Resolves the OpenPGP provider app's launcher icon, or null if unset/uninstalled. */
    @Nullable
    private static Drawable getOpenPgpApiProviderIcon(Context context, String openPgpProvider) {
        try {
            if (TextUtils.isEmpty(openPgpProvider)) {
                return null;
            }
            return context.getPackageManager().getApplicationIcon(openPgpProvider);
        } catch (NameNotFoundException e) {
            return null;
        }
    }

    /** Opens the provider config dialog and schedules a reload when we next resume. */
    public void onClickConfigureProvider() {
        reloadOnResumeWithoutRecreateFlag = true;
        messageCryptoMvpView.showCryptoConfigDialog();
    }

    /** View operations this presenter drives; implemented by the message-view UI. */
    public interface MessageCryptoMvpView {
        void redisplayMessage();
        void restartMessageCryptoProcessing();
        void startPendingIntentForCryptoPresenter(IntentSender si, Integer requestCode, Intent fillIntent,
                int flagsMask, int flagValues, int extraFlags) throws IntentSender.SendIntentException;
        void showCryptoInfoDialog(MessageCryptoDisplayStatus displayStatus, boolean hasSecurityWarning);
        void showCryptoConfigDialog();
    }
}
| apache-2.0 |
NatashaPanchina/npanchina | chapter_004/src/main/java/ru/job4j/filter/ArrayFilter.java | 506 | package ru.job4j.filter;
import java.util.Arrays;
/**
 * ArrayFilter.
 *
 * Sums the squares of the even numbers in an int array.
 *
 * @author Natasha Panchina (panchinanata25@gmail.com)
 * @version 1
 * @since 18.01.2019
 */
public class ArrayFilter {
    /**
     * Sums the squares of all even elements of the given array.
     *
     * @param numbers input values; may be empty
     * @return the sum of squares of the even elements (0 for an empty array)
     */
    public int filter(int[] numbers) {
        // IntStream.sum() replaces the hand-rolled reduce(0, (a, b) -> a + b).
        return Arrays.stream(numbers)
                .filter(n -> n % 2 == 0)
                .map(n -> n * n)
                .sum();
    }
}
| apache-2.0 |
CraigAndrew/titanium4j | src/com/emitrom/ti4j/mobile/client/core/events/ResumeEvent.java | 1583 | /**************************************************************************
ResumeEvent.java is part of Titanium4j Mobile 3.0. Copyright 2012 Emitrom LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
package com.emitrom.ti4j.mobile.client.core.events;
import com.emitrom.ti4j.mobile.client.core.handlers.activity.ResumeHandler;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.DomEvent.Type;
public class ResumeEvent extends TiEvent {
    // Titanium event names associated with application resume notifications.
    public static final String RESUME = "resume";
    public static final String RESUMED = "resumed";

    /**
     * UiBinder implementations: the GWT event Type used to register
     * {@link ResumeHandler}s for the "resume" event.
     */
    private static Type<ResumeHandler> TYPE = new Type<ResumeHandler>(RESUME, null);

    public static Type<ResumeHandler> getType() {
        return TYPE;
    }

    public static Type<ResumeHandler> getAssociatedType() {
        return TYPE;
    }

    protected ResumeEvent() {
    }

    // Wraps a native JS event object; jsObj is presumably a field inherited
    // from TiEvent -- TODO confirm. Invoked reflectively/from generated code,
    // hence @SuppressWarnings("unused").
    @SuppressWarnings("unused")
    private ResumeEvent(JavaScriptObject obj) {
        jsObj = obj;
    }
}
| apache-2.0 |
DeuceSTM/DeuceSTM | src/java/org/deuce/transform/asm/method/MethodTransformer.java | 8309 | package org.deuce.transform.asm.method;
import java.util.HashMap;
import org.deuce.Irrevocable;
import org.deuce.Unsafe;
import org.deuce.objectweb.asm.AnnotationVisitor;
import org.deuce.objectweb.asm.Attribute;
import org.deuce.objectweb.asm.Label;
import org.deuce.objectweb.asm.MethodVisitor;
import org.deuce.objectweb.asm.Opcodes;
import org.deuce.objectweb.asm.Type;
import org.deuce.objectweb.asm.commons.AnalyzerAdapter;
import org.deuce.objectweb.asm.commons.Method;
import org.deuce.transaction.ContextDelegator;
import org.deuce.transform.asm.FieldsHolder;
import org.deuce.transform.util.Util;
import static org.deuce.objectweb.asm.Opcodes.*;
/**
 * A {@link MethodVisitor} that fans every visitor event out to two targets:
 * the original (uninstrumented) method and an instrumented duplicate built
 * for transactional execution. Labels are duplicated via {@link #getLabel}
 * so the copy gets its own Label instances, keeping the two bytecode
 * streams independent.
 */
public class MethodTransformer implements MethodVisitor{
    final static private String UNSAFE_DESCRIPTOR = Type.getDescriptor(Unsafe.class);
    final static private String IRREVOCABLE_DESCRIPTOR = Type.getDescriptor(Irrevocable.class);

    // May be re-wrapped by an AtomicMethod in visitAnnotation, hence not final.
    private MethodVisitor originalMethod;
    // The duplicate visitor without instrumentation; used when @Unsafe/@Irrevocable applies.
    final private MethodVisitor originalCopyMethod;
    // The (possibly instrumented) duplicate actually written to; may be swapped
    // back to originalCopyMethod in visitAnnotation.
    private MethodVisitor copyMethod;
    final private String className;
    final private String methodName;
    final private String descriptor; // original descriptor
    // Maps original labels to their duplicates for the copied method.
    final private HashMap<Label, Label> labelMap = new HashMap<Label, Label>();
    final private boolean isStatic;
    private boolean isIrrevocable;
    final private Method newMethod;

    public MethodTransformer(MethodVisitor originalMethod, MethodVisitor copyMethod,
            String className, int access, String methodName, String descriptor, Method newMethod,
            FieldsHolder fieldsHolder) {
        this.originalMethod = originalMethod;
        this.newMethod = newMethod;
        this.isStatic = (access & ACC_STATIC) != 0;
        this.originalCopyMethod = copyMethod; // save duplicate method without instrumentation.

        // The AnalyzerAdapter delegates the call to the DuplicateMethod, while the DuplicateMethod uses
        // the analyzer for stack state in the original method.
        DuplicateMethod duplicateMethod = new DuplicateMethod( copyMethod, isStatic, newMethod, fieldsHolder);
        AnalyzerAdapter analyzerAdapter = new AnalyzerAdapter( className, access, methodName, descriptor, duplicateMethod);
        duplicateMethod.setAnalyzer( analyzerAdapter);
        this.copyMethod = analyzerAdapter;

        this.className = className;
        this.methodName = methodName;
        this.descriptor = descriptor;
    }

    public void visitCode() {
        originalMethod.visitCode();
        copyMethod.visitCode();
        // For @Irrevocable methods, prepend a call notifying the context of
        // irrevocable access; the context argument is the last method argument.
        if(isIrrevocable){ //Call onIrrevocableAccess
            int argumentsSize = Util.calcArgumentsSize(isStatic, newMethod);
            copyMethod.visitVarInsn(Opcodes.ALOAD, argumentsSize - 1); // load context
            copyMethod.visitMethodInsn( Opcodes.INVOKESTATIC, ContextDelegator.CONTEXT_DELEGATOR_INTERNAL,
                    ContextDelegator.IRREVOCABLE_METHOD_NAME, ContextDelegator.IRREVOCABLE_METHOD_DESC);
        }
    }

    // Inspects method annotations to decide how the method is transformed:
    // @Atomic wraps the original in an AtomicMethod, @Unsafe keeps the copy
    // uninstrumented, @Irrevocable additionally triggers the visitCode prologue.
    public AnnotationVisitor visitAnnotation(String desc, boolean visible) {
        // FIXME we might saw other annotations before and we need to put it on the new AtomicMethod
        // need to create an atomic method from the original method
        if( AtomicMethod.ATOMIC_DESCRIPTOR.equals(desc) && !(originalMethod instanceof AtomicMethod))
            originalMethod = new AtomicMethod( originalMethod, className, methodName,
                    descriptor, newMethod, isStatic);

        if( UNSAFE_DESCRIPTOR.equals(desc)) // if marked as Unsafe no just duplicate the method as is.
            copyMethod = originalCopyMethod;

        //Is marked as @Irrevocable
        if(IRREVOCABLE_DESCRIPTOR.equals(desc)){
            copyMethod = originalCopyMethod; // no need to instrument call
            isIrrevocable = true;
        }

        if( desc.contains("org/junit")) // TODO find another way
            return originalMethod.visitAnnotation(desc, visible);

        return new MethodAnnotationVisitor( originalMethod.visitAnnotation(desc, visible),
                copyMethod.visitAnnotation(desc, visible));
    }

    public AnnotationVisitor visitAnnotationDefault() {
        return new MethodAnnotationVisitor( originalMethod.visitAnnotationDefault(),
                copyMethod.visitAnnotationDefault());
    }

    // The remaining visitor callbacks simply fan out each event to both the
    // original and the duplicated method; label-bearing events translate
    // labels through getLabel so the copy uses its own Label instances.

    public void visitAttribute(Attribute attr) {
        originalMethod.visitAttribute(attr);
        copyMethod.visitAttribute(attr);
    }

    public void visitEnd() {
        originalMethod.visitEnd();
        copyMethod.visitEnd();
    }

    public void visitFieldInsn(int opcode, String owner, String name, String desc) {
        originalMethod.visitFieldInsn(opcode, owner, name, desc);
        copyMethod.visitFieldInsn(opcode, owner, name, desc);
    }

    public void visitFrame(int type, int local, Object[] local2, int stack, Object[] stack2) {
        originalMethod.visitFrame(type, local, local2, stack, stack2);
        copyMethod.visitFrame(type, local, local2, stack, stack2);
    }

    public void visitIincInsn(int var, int increment) {
        originalMethod.visitIincInsn(var, increment);
        copyMethod.visitIincInsn(var, increment);
    }

    public void visitInsn(int opcode) {
        originalMethod.visitInsn(opcode);
        copyMethod.visitInsn(opcode);
    }

    public void visitIntInsn(int opcode, int operand) {
        originalMethod.visitIntInsn(opcode, operand);
        copyMethod.visitIntInsn(opcode, operand);
    }

    public void visitJumpInsn(int opcode, Label label) {
        originalMethod.visitJumpInsn(opcode, label);
        copyMethod.visitJumpInsn(opcode, getLabel(label));
    }

    public void visitLabel(Label label) {
        originalMethod.visitLabel(label);
        copyMethod.visitLabel(getLabel(label));
    }

    public void visitLdcInsn(Object cst) {
        originalMethod.visitLdcInsn(cst);
        copyMethod.visitLdcInsn(cst);
    }

    public void visitLineNumber(int line, Label start) {
        originalMethod.visitLineNumber(line, start);
        copyMethod.visitLineNumber(line, getLabel(start));
    }

    public void visitLocalVariable(String name, String desc, String signature, Label start,
            Label end, int index) {
        originalMethod.visitLocalVariable(name, desc, signature, start, end, index);
        copyMethod.visitLocalVariable(name, desc, signature, getLabel(start), getLabel(end), index);
    }

    public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
        originalMethod.visitLookupSwitchInsn(dflt, keys, labels);
        copyMethod.visitLookupSwitchInsn( getLabel(dflt), keys, getCopyLabels(labels));
    }

    public void visitMaxs(int maxStack, int maxLocals) {
        originalMethod.visitMaxs(maxStack, maxLocals);
        copyMethod.visitMaxs(maxStack, maxLocals);
    }

    public void visitMethodInsn(int opcode, String owner, String name, String desc) {
        originalMethod.visitMethodInsn(opcode, owner, name, desc);
        copyMethod.visitMethodInsn(opcode, owner, name, desc);
    }

    public void visitMultiANewArrayInsn(String desc, int dims) {
        originalMethod.visitMultiANewArrayInsn(desc, dims);
        copyMethod.visitMultiANewArrayInsn(desc, dims);
    }

    public AnnotationVisitor visitParameterAnnotation(int parameter, String desc, boolean visible) {
        return new MethodAnnotationVisitor( originalMethod.visitParameterAnnotation(parameter, desc, visible),
                copyMethod.visitParameterAnnotation(parameter, desc, visible));
    }

    public void visitTableSwitchInsn(int min, int max, Label dflt, Label[] labels) {
        originalMethod.visitTableSwitchInsn(min, max, dflt, labels);
        copyMethod.visitTableSwitchInsn(min, max, getLabel(dflt), getCopyLabels(labels));
    }

    public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
        originalMethod.visitTryCatchBlock(start, end, handler, type);
        copyMethod.visitTryCatchBlock(getLabel(start), getLabel(end), getLabel(handler), type);
    }

    public void visitTypeInsn(int opcode, String type) {
        originalMethod.visitTypeInsn(opcode, type);
        copyMethod.visitTypeInsn(opcode, type);
    }

    public void visitVarInsn(int opcode, int var) {
        originalMethod.visitVarInsn(opcode, var);
        copyMethod.visitVarInsn(opcode, var);
    }

    // Translates an array of labels to their duplicates for the copied method.
    private Label[] getCopyLabels(Label[] labels) {
        Label[] copyLabels = new Label[ labels.length];
        for( int i=0; i<labels.length ;++i) {
            copyLabels[i] = getLabel(labels[i]);
        }
        return copyLabels;
    }

    // Returns (creating on first use) the duplicate of the given label.
    private Label getLabel( Label label){
        Label duplicateLabel = labelMap.get( label);
        if( duplicateLabel == null) {
            duplicateLabel = new Label();
            labelMap.put(label, duplicateLabel);
        }
        return duplicateLabel;
    }
}
| apache-2.0 |
347184068/gmenergy | src/main/java/com/tp/ems/modules/sys/dao/DictDao.java | 510 | /**
* Copyright © 2012-2014 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.tp.ems.modules.sys.dao;
import java.util.List;
import com.tp.ems.common.persistence.CrudDao;
import com.tp.ems.common.persistence.annotation.MyBatisDao;
import com.tp.ems.modules.sys.entity.Dict;
/**
 * Dictionary DAO interface (MyBatis mapper for dictionary entities).
 * @author ThinkGem
 * @version 2014-05-16
 */
@MyBatisDao
public interface DictDao extends CrudDao<Dict> {
    // Returns the list of dictionary type names; filter criteria are presumably
    // carried on the query object -- TODO confirm against the mapper XML.
    public List<String> findTypeList(Dict dict);
}
| apache-2.0 |
peter-gergely-horvath/kylo | services/operational-metadata-service/operational-metadata-integration-service/src/main/java/com/thinkbiganalytics/metadata/jobrepo/StreamingFeedService.java | 3940 | package com.thinkbiganalytics.metadata.jobrepo;
/*-
* #%L
* thinkbig-operational-metadata-integration-service
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.metadata.api.MetadataAccess;
import com.thinkbiganalytics.metadata.api.event.MetadataEventListener;
import com.thinkbiganalytics.metadata.api.event.MetadataEventService;
import com.thinkbiganalytics.metadata.api.event.feed.FeedChangeEvent;
import com.thinkbiganalytics.metadata.api.feed.Feed;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeed;
import com.thinkbiganalytics.metadata.api.feed.OpsManagerFeedProvider;
import com.thinkbiganalytics.metadata.api.jobrepo.ExecutionConstants;
import com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecution;
import com.thinkbiganalytics.metadata.api.jobrepo.job.BatchJobExecutionProvider;
import com.thinkbiganalytics.metadata.jobrepo.nifi.provenance.ProvenanceEventFeedUtil;
import org.joda.time.DateTime;
import java.util.Optional;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
/**
 * Listens for feed change events and, for streaming feeds, mirrors the feed's
 * ENABLED/DISABLED state onto its latest job execution record (STARTED/STOPPED).
 *
 * Created by sr186054 on 6/17/17.
 */
public class StreamingFeedService {

    @Inject
    private MetadataEventService metadataEventService;

    @Inject
    protected MetadataAccess metadataAccess;

    @Inject
    private BatchJobExecutionProvider batchJobExecutionProvider;

    @Inject
    OpsManagerFeedProvider opsManagerFeedProvider;

    @Inject
    ProvenanceEventFeedUtil provenanceEventFeedUtil;

    /**
     * Event listener for precondition events
     */
    private final MetadataEventListener<FeedChangeEvent> feedPropertyChangeListener = new FeedChangeEventDispatcher();

    /** Registers the feed-change listener once dependency injection has completed. */
    @PostConstruct
    public void addEventListener() {
        metadataEventService.addListener(feedPropertyChangeListener);
    }

    private class FeedChangeEventDispatcher implements MetadataEventListener<FeedChangeEvent> {

        @Override
        public void notify(@Nonnull final FeedChangeEvent metadataEvent) {
            Optional<String> feedName = metadataEvent.getData().getFeedName();
            Feed.State state = metadataEvent.getData().getFeedState();
            if (feedName.isPresent()) {
                // Run as the service principal inside a metadata transaction.
                metadataAccess.commit(() -> {
                    OpsManagerFeed feed = opsManagerFeedProvider.findByName(feedName.get());
                    // Only streaming feeds mirror their state onto a job execution.
                    if (feed != null && feed.isStream()) {
                        //update the job status
                        // NOTE(review): findLatestJobForFeed may plausibly return null for a
                        // feed that has never run, which would NPE below -- confirm invariant.
                        BatchJobExecution jobExecution = batchJobExecutionProvider.findLatestJobForFeed(feedName.get());
                        if (state.equals(Feed.State.ENABLED)) {
                            // Feed enabled: mark the job as running from now.
                            jobExecution.setStatus(BatchJobExecution.JobStatus.STARTED);
                            jobExecution.setExitCode(ExecutionConstants.ExitCode.EXECUTING);
                            jobExecution.setStartTime(DateTime.now());
                        } else {
                            // Feed disabled: mark the job as stopped/completed now.
                            jobExecution.setStatus(BatchJobExecution.JobStatus.STOPPED);
                            jobExecution.setExitCode(ExecutionConstants.ExitCode.COMPLETED);
                            jobExecution.setEndTime(DateTime.now());
                        }
                        batchJobExecutionProvider.save(jobExecution);
                    }
                }, MetadataAccess.SERVICE);
            }
        }
    }
}
| apache-2.0 |
jonm/chaplib | src/test/java/org/chaplib/TestDefaultUserAgentGenerator.java | 1443 | /*
* TestDefaultUserAgentGenerator.java
*
* Copyright (C) 2012 Jonathan Moore
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.chaplib;
import static org.junit.Assert.*;
import org.apache.http.HttpRequest;
import org.apache.http.client.methods.HttpGet;
import org.junit.Before;
import org.junit.Test;
public class TestDefaultUserAgentGenerator {

    private DefaultUserAgentGenerator generator;

    @Before
    public void setUp() {
        generator = new DefaultUserAgentGenerator();
    }

    /*
     * "User agents SHOULD include this [User-Agent] field with requests."
     * http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.43
     */
    @Test
    public void generatesAUserAgentString() {
        HttpRequest request = new HttpGet("http://www.example.com/");
        generator.setUserAgent(request);
        String userAgent = request.getFirstHeader("User-Agent").getValue();
        assertFalse("".equals(userAgent.trim()));
    }
}
| apache-2.0 |
visallo/visallo | web/plugins/admin-user-tools/src/main/java/org/visallo/web/plugin/adminUserTools/AdminUserToolsWebAppPlugin.java | 1886 | package org.visallo.web.plugin.adminUserTools;
import org.visallo.webster.Handler;
import org.visallo.core.model.Description;
import org.visallo.core.model.Name;
import org.visallo.web.VisalloCsrfHandler;
import org.visallo.web.WebApp;
import org.visallo.web.WebAppPlugin;
import org.visallo.web.privilegeFilters.AdminPrivilegeFilter;
import javax.servlet.ServletContext;
@Name("Admin User Tools")
@Description("Admin tools to add/update/delete users")
public class AdminUserToolsWebAppPlugin implements WebAppPlugin {

    /**
     * Registers this plugin's front-end assets (JS components, styles, message
     * bundle) and its admin-only POST routes with the web app.
     * Registration order is preserved from the original; it may be significant
     * for front-end load order -- left unchanged.
     */
    @Override
    public void init(WebApp app, ServletContext servletContext, Handler authenticationHandler) {
        Class<? extends Handler> authenticationHandlerClass = authenticationHandler.getClass();
        Class<? extends Handler> csrfHandlerClass = VisalloCsrfHandler.class;

        app.registerJavaScript("/org/visallo/web/adminUserTools/plugin.js");
        app.registerJavaScriptComponent("/org/visallo/web/adminUserTools/UserAdminPlugin.jsx");
        app.registerJavaScriptComponent("/org/visallo/web/adminUserTools/WorkspaceList.jsx");
        app.registerJavaScriptComponent("/org/visallo/web/adminUserTools/LoadUser.jsx");
        app.registerJavaScriptComponent("/org/visallo/web/adminUserTools/UserTypeaheadInput.jsx");
        app.registerJavaScriptComponent("/org/visallo/web/adminUserTools/ActiveUserList.jsx");
        app.registerLess("/org/visallo/web/adminUserTools/userAdmin.less");
        app.registerCss("/org/visallo/web/adminUserTools/workspaceList.css");
        app.registerResourceBundle("/org/visallo/web/adminUserTools/messages.properties");

        // Both routes require authentication, CSRF protection, and admin privilege.
        app.post("/user/delete", authenticationHandlerClass, csrfHandlerClass, AdminPrivilegeFilter.class, UserDelete.class);
        app.post("/workspace/shareWithMe", authenticationHandlerClass, csrfHandlerClass, AdminPrivilegeFilter.class, WorkspaceShareWithMe.class);
    }
}
| apache-2.0 |
vespa-engine/vespa | container-search/src/main/java/com/yahoo/search/query/ranking/RankFeatures.java | 5182 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.query.ranking;
import com.yahoo.fs4.MapEncoder;
import com.yahoo.tensor.Tensor;
import com.yahoo.text.JSON;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalDouble;
/**
* Contains the rank features of a query.
*
* @author bratseth
*/
public class RankFeatures implements Cloneable {
private final Map<String, Object> features;
/** Creates an empty, insertion-ordered rank feature set. */
public RankFeatures() {
    this(new LinkedHashMap<>());
}

/** Wraps (does not copy) the given map as this object's backing store. */
private RankFeatures(Map<String, Object> features) {
    this.features = features;
}
/** Sets a double rank feature, overwriting any existing value of the same name. */
public void put(String name, double value) {
    features.put(name, value);
}

/** Sets a tensor rank feature, overwriting any existing value of the same name. */
public void put(String name, Tensor value) {
    features.put(name, value);
}
/**
 * Sets a rank feature to a value represented as a string.
 *
 * @deprecated set either a double or a tensor
 */
@Deprecated // TODO: Remove on Vespa 8
public void put(String name, String value) {
    features.put(name, value);
}

/**
 * Returns a rank feature as a string by full name or null if not set.
 * Non-string values are converted via toString.
 *
 * @deprecated use getTensor (or getDouble) instead
 */
@Deprecated // TODO: Remove on Vespa 8
public String get(String name) {
    Object value = features.get(name);
    if (value == null) return null;
    return value.toString();
}
/** Returns this value as either a Double, Tensor or String. Returns null if the value is not set. */
public Object getObject(String name) {
return features.get(name);
}
/**
* Returns a double rank feature, or empty if there is no value with this name.
*
* @throws IllegalArgumentException if the value is set but is not a double
*/
public OptionalDouble getDouble(String name) {
Object feature = features.get(name);
if (feature == null) return OptionalDouble.empty();
if (feature instanceof Double) return OptionalDouble.of((Double)feature);
throw new IllegalArgumentException("Expected a double value of '" + name + "' but has " + feature);
}
/**
* Returns a rank feature as a tensor, or empty if there is no value with this name.
*
* @throws IllegalArgumentException if the value is a string, not a tensor or double
*/
public Optional<Tensor> getTensor(String name) {
Object feature = features.get(name);
if (feature == null) return Optional.empty();
if (feature instanceof Tensor) return Optional.of((Tensor)feature);
if (feature instanceof Double) return Optional.of(Tensor.from((Double)feature));
throw new IllegalArgumentException("Expected a tensor value of '" + name + "' but has " + feature);
}
/**
* Returns the map holding the features of this.
* This map may be modified to change the rank features of the query.
*/
public Map<String, Object> asMap() { return features; }
public boolean isEmpty() {
return features.isEmpty();
}
/**
* Prepares this for encoding, not for external use. See encode on Query for details.
* <p>
* If the query feature is found in the rank feature set,
* remove all these entries and insert them into the rank property set instead.
* We want to hide from the user that the query feature value is sent down as a rank property
* and picked up by the query feature executor in the backend.
*/
public void prepare(RankProperties rankProperties) {
if (isEmpty()) return;
List<String> featuresToRemove = new ArrayList<>();
List<String> propertiesToInsert = new ArrayList<>();
for (String key : features.keySet()) {
if (key.startsWith("query(") && key.endsWith(")")) {
featuresToRemove.add(key);
propertiesToInsert.add(key.substring("query(".length(), key.length() - 1));
} else if (key.startsWith("$")) {
featuresToRemove.add(key);
propertiesToInsert.add(key.substring(1));
}
}
for (int i = 0; i < featuresToRemove.size(); ++i) {
rankProperties.put(propertiesToInsert.get(i), features.remove(featuresToRemove.get(i)));
}
}
public int encode(ByteBuffer buffer) {
return MapEncoder.encodeMap("feature", features, buffer);
}
@Override
public boolean equals(Object other) {
if (other == this) return true;
if ( ! (other instanceof RankFeatures)) return false;
return this.features.equals(((RankFeatures)other).features);
}
@Override
public int hashCode() {
return features.hashCode();
}
@Override
public RankFeatures clone() {
return new RankFeatures(new LinkedHashMap<>(features));
}
@Override
public String toString() {
return JSON.encode(features);
}
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-playintegrity/v1/1.31.0/com/google/api/services/playintegrity/v1/PlayIntegrityScopes.java | 1390 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.playintegrity.v1;
/**
* Available OAuth 2.0 scopes for use with the Google Play Integrity API.
*
* @since 1.4
*/
public class PlayIntegrityScopes {

  /** Private Service: https://www.googleapis.com/auth/playintegrity. */
  public static final String PLAYINTEGRITY = "https://www.googleapis.com/auth/playintegrity";

  /**
   * Returns an unmodifiable set that contains all scopes declared by this class.
   *
   * @since 1.16
   */
  public static java.util.Set<String> all() {
    java.util.Set<String> scopes = new java.util.HashSet<String>();
    java.util.Collections.addAll(scopes, PLAYINTEGRITY);
    return java.util.Collections.unmodifiableSet(scopes);
  }

  /** Non-instantiable: this class only holds scope constants. */
  private PlayIntegrityScopes() {
  }
}
| apache-2.0 |
ruediste/rise | test-app/src/main/java/com/github/ruediste/rise/testApp/validation/AdditionalResourceKeys.java | 416 | package com.github.ruediste.rise.testApp.validation;
import org.hibernate.validator.constraints.Length;
import com.github.ruediste1.i18n.lString.AdditionalResourceKeyProvider;
public class AdditionalResourceKeys implements AdditionalResourceKeyProvider {

    /**
     * Registers resource keys that are not discovered automatically, so they
     * are included when the translatable resources are collected.
     */
    @Override
    public void provideKeys(KeyReceiver receiver) {
        // Message for Hibernate Validator's @Length constraint; the {min} and {max}
        // placeholders are interpolated from the constraint's attributes at runtime.
        receiver.addMessage(Length.class, "len must be between {min} and {max}");
    }
}
| apache-2.0 |
TranscendComputing/TopStackCore | src/com/msi/tough/model/rds/RdsDbengine.java | 3022 | /*
* TopStack (c) Copyright 2012-2013 Transcend Computing, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.msi.tough.model.rds;
// Generated May 10, 2011 9:13:49 AM by Hibernate Tools 3.4.0.CR1
import java.util.Date;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
/**
* RdsDbengine generated by hbm2java
*/
@Entity
@Table(name = "rds_dbengine")
public class RdsDbengine {
    // Surrogate primary key, generated by the database (auto-increment).
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private long id;
    // NOTE(review): the snake_case field names below presumably map directly to the
    // column names of "rds_dbengine" via the default naming strategy — confirm before
    // renaming anything, as Hibernate resolves these reflectively.
    private String engine;
    private String engine_version;
    private String dbparameterGroupFamily;
    // Named with a leading underscore because "default" is a reserved Java keyword.
    private Boolean _default;
    private String next_minor_version;
    private Date minor_version_release_date;
    private String next_major_version;
    private Date major_version_release_date;
    public String getDbparameterGroupFamily() {
        return dbparameterGroupFamily;
    }
    // Accessor pair for the "_default" flag; trailing underscore avoids the keyword clash.
    public Boolean getDefault_() {
        return _default;
    }
    public String getEngine() {
        return engine;
    }
    public String getEngineVersion() {
        return engine_version;
    }
    public long getId() {
        return id;
    }
    public Date getMajorVersionReleaseDate() {
        return major_version_release_date;
    }
    public Date getMinorVersionReleaseDate() {
        return minor_version_release_date;
    }
    public String getNextMajorVersion() {
        return next_major_version;
    }
    public String getNextMinorVersion() {
        return next_minor_version;
    }
    public void setDbparameterGroupFamily(String dbparameterGroupFamily) {
        this.dbparameterGroupFamily = dbparameterGroupFamily;
    }
    public void setDefault_(Boolean _default) {
        this._default = _default;
    }
    public void setEngine(String engine) {
        this.engine = engine;
    }
    public void setEngineVersion(String engineVersion) {
        this.engine_version = engineVersion;
    }
    public void setId(long id) {
        this.id = id;
    }
    public void setMajorVersionReleaseDate(Date majorVersionReleaseDate) {
        this.major_version_release_date = majorVersionReleaseDate;
    }
    public void setMinorVersionReleaseDate(Date minorVersionReleaseDate) {
        this.minor_version_release_date = minorVersionReleaseDate;
    }
    public void setNextMajorVersion(String nextMajorVersion) {
        this.next_major_version = nextMajorVersion;
    }
    public void setNextMinorVersion(String nextMinorVersion) {
        this.next_minor_version = nextMinorVersion;
    }
}
| apache-2.0 |
kimcy929/NotificationLater | NotificationLater/src/main/java/com/kimcy929/notificationlater/ForegroundLinearLayout.java | 6855 | /*
* Copyright (C) 2006 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kimcy929.notificationlater;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.Gravity;
import android.widget.LinearLayout;
public class ForegroundLinearLayout extends LinearLayout {

    // Drawable rendered on top of all children; null means no foreground.
    private Drawable mForeground;

    // Scratch rectangles reused across draw() calls to avoid per-frame allocations.
    private final Rect mSelfBounds = new Rect();
    private final Rect mOverlayBounds = new Rect();

    // Gravity used to position the foreground drawable within this view.
    private int mForegroundGravity = Gravity.FILL;

    // When true the foreground covers the padded area too; when false it is inset by padding.
    protected boolean mForegroundInPadding = true;

    // Set when layout/size changed so draw() knows to recompute the foreground bounds.
    boolean mForegroundBoundsChanged = false;

    // NOTE(review): this constructor skips attribute parsing, so a foreground can only
    // be supplied programmatically via setForeground() when it is used.
    public ForegroundLinearLayout(Context context) {
        super(context);
    }

    public ForegroundLinearLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public ForegroundLinearLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        // Read android:foreground, android:foregroundGravity and
        // android:foregroundInsidePadding from the view's XML attributes.
        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.ForegroundLinearLayout,
                defStyle, 0);

        mForegroundGravity = a.getInt(
                R.styleable.ForegroundLinearLayout_android_foregroundGravity, mForegroundGravity);

        final Drawable d = a.getDrawable(R.styleable.ForegroundLinearLayout_android_foreground);
        if (d != null) {
            setForeground(d);
        }

        mForegroundInPadding = a.getBoolean(
                R.styleable.ForegroundLinearLayout_android_foregroundInsidePadding, true);

        a.recycle();
    }

    /**
     * Describes how the foreground is positioned.
     *
     * @return foreground gravity.
     *
     * @see #setForegroundGravity(int)
     */
    public int getForegroundGravity() {
        return mForegroundGravity;
    }

    /**
     * Describes how the foreground is positioned. Defaults to START and TOP.
     *
     * @param foregroundGravity See {@link android.view.Gravity}
     *
     * @see #getForegroundGravity()
     */
    public void setForegroundGravity(int foregroundGravity) {
        if (mForegroundGravity != foregroundGravity) {
            // Fill in default horizontal/vertical gravity when the caller specified neither.
            if ((foregroundGravity & Gravity.RELATIVE_HORIZONTAL_GRAVITY_MASK) == 0) {
                foregroundGravity |= Gravity.START;
            }

            if ((foregroundGravity & Gravity.VERTICAL_GRAVITY_MASK) == 0) {
                foregroundGravity |= Gravity.TOP;
            }

            mForegroundGravity = foregroundGravity;

            // NOTE(review): the padding queried below is never used; in the AOSP
            // FrameLayout original this fed foreground-padding fields that were
            // stripped from this port. Looks like dead code — confirm and remove.
            if (mForegroundGravity == Gravity.FILL && mForeground != null) {
                Rect padding = new Rect();
                mForeground.getPadding(padding);
            }

            requestLayout();
        }
    }

    // Accept invalidation callbacks from our foreground drawable as well as from super's drawables.
    @Override
    protected boolean verifyDrawable(Drawable who) {
        return super.verifyDrawable(who) || (who == mForeground);
    }

    @Override
    public void jumpDrawablesToCurrentState() {
        super.jumpDrawablesToCurrentState();
        if (mForeground != null) mForeground.jumpToCurrentState();
    }

    // Keep a stateful foreground (e.g. a pressed-state selector) in sync with this view's state.
    @Override
    protected void drawableStateChanged() {
        super.drawableStateChanged();
        if (mForeground != null && mForeground.isStateful()) {
            mForeground.setState(getDrawableState());
        }
    }

    /**
     * Supply a Drawable that is to be rendered on top of all of the child
     * views in the frame layout.  Any padding in the Drawable will be taken
     * into account by ensuring that the children are inset to be placed
     * inside of the padding area.
     *
     * @param drawable The Drawable to be drawn on top of the children.
     */
    public void setForeground(Drawable drawable) {
        if (mForeground != drawable) {
            // Detach the old drawable so it stops invalidating this view.
            if (mForeground != null) {
                mForeground.setCallback(null);
                unscheduleDrawable(mForeground);
            }

            mForeground = drawable;

            if (drawable != null) {
                // A foreground requires onDraw/draw to run, so willNotDraw must be false.
                setWillNotDraw(false);
                drawable.setCallback(this);
                if (drawable.isStateful()) {
                    drawable.setState(getDrawableState());
                }
                // NOTE(review): padding queried but discarded — same dead code as in
                // setForegroundGravity(); confirm and remove.
                if (mForegroundGravity == Gravity.FILL) {
                    Rect padding = new Rect();
                    drawable.getPadding(padding);
                }
            }  else {
                setWillNotDraw(true);
            }
            requestLayout();
            invalidate();
        }
    }

    /**
     * Returns the drawable used as the foreground of this FrameLayout. The
     * foreground drawable, if non-null, is always drawn on top of the children.
     *
     * @return A Drawable or null if no foreground was set.
     */
    public Drawable getForeground() {
        return mForeground;
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // Flag the foreground bounds stale only when layout actually moved/resized this view.
        mForegroundBoundsChanged = changed;
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        mForegroundBoundsChanged = true;
    }

    @Override
    public void draw(Canvas canvas) {
        super.draw(canvas);

        // Draw the foreground last so it appears on top of the children.
        if (mForeground != null) {
            final Drawable foreground = mForeground;

            // Lazily recompute the drawable bounds only after a layout/size change.
            if (mForegroundBoundsChanged) {
                mForegroundBoundsChanged = false;
                final Rect selfBounds = mSelfBounds;
                final Rect overlayBounds = mOverlayBounds;

                final int w = getRight() - getLeft();
                final int h = getBottom() - getTop();

                if (mForegroundInPadding) {
                    selfBounds.set(0, 0, w, h);
                } else {
                    selfBounds.set(getPaddingLeft(), getPaddingTop(),
                            w - getPaddingRight(), h - getPaddingBottom());
                }

                // Position the drawable's intrinsic size within selfBounds per the gravity.
                Gravity.apply(mForegroundGravity, foreground.getIntrinsicWidth(),
                        foreground.getIntrinsicHeight(), selfBounds, overlayBounds);
                foreground.setBounds(overlayBounds);
            }

            foreground.draw(canvas);
        }
    }
}
| apache-2.0 |
wesleykerr/robot-simulator | src/main/java/com/seekerr/simulator/robot/env/Momentum.java | 1093 | package com.seekerr.simulator.robot.env;
/**
*
* @author Wesley Kerr
*/
public class Momentum {

    /** Momentum (x, y) accumulated within the current, unfinished time step. */
    private final double[] timeStepMomentum;

    /** Momentum (x, y) accumulated over all finished time steps. */
    private final double[] totalMomentum;

    /** Number of time steps finished so far. */
    private double timeSteps;

    /** Creates a new instance of Momentum with all accumulators at zero. */
    public Momentum() {
        timeStepMomentum = new double[2];
        totalMomentum = new double[2];
        timeSteps = 0;
    }

    /**
     * Returns the total momentum accumulated over all finished time steps.
     * A defensive copy is returned so callers cannot corrupt the internal
     * accumulator (the previous version leaked the internal array).
     *
     * @return a two-element array {x, y}
     */
    public double[] getTotalMomentum() {
        return totalMomentum.clone();
    }

    /**
     * Returns the average momentum per finished time step.
     * If no time step has been finished yet, both components are NaN (0/0),
     * matching the original behavior.
     *
     * @return a two-element array {x, y}
     */
    public double[] getAverageMomentum() {
        double[] temp = new double[2];
        temp[0] = totalMomentum[0] / timeSteps;
        temp[1] = totalMomentum[1] / timeSteps;
        return temp;
    }

    /**
     * Adds a momentum delta to the current time step.
     *
     * @param deltax change along the x axis
     * @param deltay change along the y axis
     */
    public void addMomentum(double deltax, double deltay) {
        timeStepMomentum[0] += deltax;
        timeStepMomentum[1] += deltay;
    }

    /**
     * Folds the current time step's momentum into the running total,
     * increments the finished-step count, and resets the per-step accumulator.
     */
    public void finishTimeStep() {
        timeSteps++;
        totalMomentum[0] += timeStepMomentum[0];
        totalMomentum[1] += timeStepMomentum[1];
        timeStepMomentum[0] = 0;
        timeStepMomentum[1] = 0;
    }
}
| apache-2.0 |
vam-google/google-cloud-java | google-api-grpc/proto-google-cloud-scheduler-v1beta1/src/main/java/com/google/cloud/scheduler/v1beta1/DeleteJobRequest.java | 19921 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/scheduler/v1beta1/cloudscheduler.proto
package com.google.cloud.scheduler.v1beta1;
/**
*
*
* <pre>
* Request message for deleting a job using
* [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob].
* </pre>
*
* Protobuf type {@code google.cloud.scheduler.v1beta1.DeleteJobRequest}
*/
public final class DeleteJobRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.scheduler.v1beta1.DeleteJobRequest)
    DeleteJobRequestOrBuilder {
  // NOTE(review): protoc-generated code ("DO NOT EDIT" per file header); any hand
  // edits here, including these comments, will be lost on regeneration.
  private static final long serialVersionUID = 0L;
  // Use DeleteJobRequest.newBuilder() to construct.
  private DeleteJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private DeleteJobRequest() {
    name_ = "";
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor: reads tag/value pairs until end of stream
  // (tag 0); unrecognized fields are preserved in unknownFields.
  private DeleteJobRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              java.lang.String s = input.readStringRequireUtf8();

              name_ = s;
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.scheduler.v1beta1.SchedulerProto
        .internal_static_google_cloud_scheduler_v1beta1_DeleteJobRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.scheduler.v1beta1.SchedulerProto
        .internal_static_google_cloud_scheduler_v1beta1_DeleteJobRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.scheduler.v1beta1.DeleteJobRequest.class,
            com.google.cloud.scheduler.v1beta1.DeleteJobRequest.Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;
  private volatile java.lang.Object name_;
  /**
   *
   *
   * <pre>
   * Required.
   * The job name. For example:
   * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required.
   * The job name. For example:
   * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
   * </pre>
   *
   * <code>string name = 1;</code>
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!getNameBytes().isEmpty()) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!getNameBytes().isEmpty()) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.scheduler.v1beta1.DeleteJobRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.scheduler.v1beta1.DeleteJobRequest other =
        (com.google.cloud.scheduler.v1beta1.DeleteJobRequest) obj;

    boolean result = true;
    result = result && getName().equals(other.getName());
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.scheduler.v1beta1.DeleteJobRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for deleting a job using
   * [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob].
   * </pre>
   *
   * Protobuf type {@code google.cloud.scheduler.v1beta1.DeleteJobRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.scheduler.v1beta1.DeleteJobRequest)
      com.google.cloud.scheduler.v1beta1.DeleteJobRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.scheduler.v1beta1.SchedulerProto
          .internal_static_google_cloud_scheduler_v1beta1_DeleteJobRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.scheduler.v1beta1.SchedulerProto
          .internal_static_google_cloud_scheduler_v1beta1_DeleteJobRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.scheduler.v1beta1.DeleteJobRequest.class,
              com.google.cloud.scheduler.v1beta1.DeleteJobRequest.Builder.class);
    }

    // Construct using com.google.cloud.scheduler.v1beta1.DeleteJobRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      name_ = "";

      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.scheduler.v1beta1.SchedulerProto
          .internal_static_google_cloud_scheduler_v1beta1_DeleteJobRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.scheduler.v1beta1.DeleteJobRequest getDefaultInstanceForType() {
      return com.google.cloud.scheduler.v1beta1.DeleteJobRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.scheduler.v1beta1.DeleteJobRequest build() {
      com.google.cloud.scheduler.v1beta1.DeleteJobRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.scheduler.v1beta1.DeleteJobRequest buildPartial() {
      com.google.cloud.scheduler.v1beta1.DeleteJobRequest result =
          new com.google.cloud.scheduler.v1beta1.DeleteJobRequest(this);
      result.name_ = name_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.scheduler.v1beta1.DeleteJobRequest) {
        return mergeFrom((com.google.cloud.scheduler.v1beta1.DeleteJobRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.scheduler.v1beta1.DeleteJobRequest other) {
      if (other == com.google.cloud.scheduler.v1beta1.DeleteJobRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.scheduler.v1beta1.DeleteJobRequest parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.scheduler.v1beta1.DeleteJobRequest) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required.
     * The job name. For example:
     * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required.
     * The job name. For example:
     * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required.
     * The job name. For example:
     * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }

      name_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required.
     * The job name. For example:
     * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder clearName() {

      name_ = getDefaultInstance().getName();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required.
     * The job name. For example:
     * `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`.
     * </pre>
     *
     * <code>string name = 1;</code>
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);

      name_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.scheduler.v1beta1.DeleteJobRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.DeleteJobRequest)
  private static final com.google.cloud.scheduler.v1beta1.DeleteJobRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.scheduler.v1beta1.DeleteJobRequest();
  }

  public static com.google.cloud.scheduler.v1beta1.DeleteJobRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser singleton used by the protobuf runtime to deserialize instances.
  private static final com.google.protobuf.Parser<DeleteJobRequest> PARSER =
      new com.google.protobuf.AbstractParser<DeleteJobRequest>() {
        @java.lang.Override
        public DeleteJobRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new DeleteJobRequest(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<DeleteJobRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<DeleteJobRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.scheduler.v1beta1.DeleteJobRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| apache-2.0 |
thrawn-sh/subversion | src/test/java/de/shadowhunt/subversion/internal/httpv1/v1_4/RepositoryResolveIT.java | 981 | /**
* Copyright © 2013-2018 shadowhunt (dev@shadowhunt.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.shadowhunt.subversion.internal.httpv1.v1_4;
import de.shadowhunt.subversion.internal.AbstractRepositoryResolveIT;
public class RepositoryResolveIT extends AbstractRepositoryResolveIT {

    /** Shared fixture supplying the test repository and the test id. */
    private static final Helper HELPER = new Helper();

    public RepositoryResolveIT() {
        // Runs the inherited resolve tests against repository "A" of the
        // SVN 1.4 setup (see the v1_4 package).
        super(HELPER.getRepositoryA(), HELPER.getTestId());
    }
}
| apache-2.0 |
codelibs/cl-struts | src/test/org/apache/struts/taglib/bean/TestMessageTag1.java | 12716 | /*
* $Id: TestMessageTag1.java 54929 2004-10-16 16:38:42Z germuska $
*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.struts.taglib.bean;
import java.util.Locale;
import javax.servlet.jsp.PageContext;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.cactus.WebResponse;
import org.apache.struts.Globals;
import org.apache.struts.taglib.SimpleBeanForTesting;
import org.apache.struts.taglib.TaglibTestBase;
/**
* These tests attempt to cover every single possible configuration of the
* org.apache.struts.taglib.bean.MessageTag
*
* I've tried to describe what I'm testing as best as possible by the method names.
* To see how I'm testing, refer to the jsp file that these tests forward to.
*
 * All of these tests depend on a value being correctly written to the response, then
 * checked here in the endXXX method.
*
*/
public class TestMessageTag1 extends TaglibTestBase {

    /** Attribute key under which the test bean is stored. */
    protected final static String TEST_KEY = "BeanKey";
    /** Message text the JSP is expected to render for every variant. */
    protected final static String TEST_VAL = "Testing Message 1";

    public TestMessageTag1(String theName) {
        super(theName);
    }

    /**
     * Start the tests.
     *
     * @param theArgs the arguments. Not used
     */
    public static void main(String[] theArgs) {
        junit.awtui.TestRunner.main(new String[] {TestMessageTag1.class.getName()});
    }

    /**
     * @return a test suite (<code>TestSuite</code>) that includes all methods
     *         starting with "test"
     */
    public static Test suite() {
        // All methods starting with "test" will be executed in the test suite.
        return new TestSuite(TestMessageTag1.class);
    }

    // Normalizes the rendered output (strips CR/LF and surrounding whitespace)
    // before comparing it with the expected message text.
    private void formatAndTest(String compare, String output) {
        //fix for introduced carriage return / line feeds
        output = replace(output,"\r","");
        output = replace(output,"\n","");
        output = output.trim();
        //System.out.println("Testing [" + compare + "] == [" + output + "]");
        assertEquals(compare, output);
    }

    // Stores the requested locale and test name, then forwards to the JSP that
    // exercises the corresponding <bean:message> configuration.
    private void runMyTest(String whichTest, Locale locale) throws Exception {
        pageContext.setAttribute(Globals.LOCALE_KEY, locale, PageContext.SESSION_SCOPE);
        request.setAttribute("runTest", whichTest);
        pageContext.forward("/test/org/apache/struts/taglib/bean/TestMessageTag1.jsp");
    }

    /*
     * ===========================================================
     * Testing MessageTag (these comments serve as a divider of
     * functionality being tested)
     *
     * Section: 1 Arg
     * Locale: (default)
     * ===========================================================
     */

    // --- Lookup via the "key" attribute, in each scope, for both bundles ---

    public void testMessageTag1ArgKeyNoScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyNoScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyNoScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeyApplicationScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyApplicationScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyApplicationScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeySessionScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeySessionScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeySessionScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeyRequestScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyRequestScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyRequestScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeyNoScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyNoScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyNoScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeyApplicationScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyApplicationScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyApplicationScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeySessionScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeySessionScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeySessionScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgKeyRequestScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgKeyRequestScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgKeyRequestScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    // --- Lookup via the "name" attribute, in each scope, for both bundles ---

    public void testMessageTag1ArgNameNoScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameNoScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameNoScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameApplicationScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameApplicationScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameApplicationScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameSessionScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameSessionScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameSessionScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameRequestScopeDefaultBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameRequestScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameRequestScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameNoScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameNoScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameNoScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameApplicationScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameApplicationScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameApplicationScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameSessionScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameSessionScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameSessionScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNameRequestScopeAlternateBundle() throws Exception {
        runMyTest("testMessageTag1ArgNameRequestScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNameRequestScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    // --- Lookup via "name" + "property": the message key is read from a bean
    //     placed in the scope under test before forwarding ---

    public void testMessageTag1ArgNamePropertyNoScopeDefaultBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("default.bundle.message.1"), PageContext.REQUEST_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyNoScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyNoScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertyApplicationScopeDefaultBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("default.bundle.message.1"), PageContext.APPLICATION_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyApplicationScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyApplicationScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertySessionScopeDefaultBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("default.bundle.message.1"), PageContext.SESSION_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertySessionScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertySessionScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertyRequestScopeDefaultBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("default.bundle.message.1"), PageContext.REQUEST_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyRequestScopeDefaultBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyRequestScopeDefaultBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertyNoScopeAlternateBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("alternate.bundle.message.1"), PageContext.REQUEST_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyNoScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyNoScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertyApplicationScopeAlternateBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("alternate.bundle.message.1"), PageContext.APPLICATION_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyApplicationScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyApplicationScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertySessionScopeAlternateBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("alternate.bundle.message.1"), PageContext.SESSION_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertySessionScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertySessionScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

    public void testMessageTag1ArgNamePropertyRequestScopeAlternateBundle() throws Exception {
        pageContext.setAttribute("key", new SimpleBeanForTesting("alternate.bundle.message.1"), PageContext.REQUEST_SCOPE);
        runMyTest("testMessageTag1ArgNamePropertyRequestScopeAlternateBundle", new Locale("",""));
    }
    public void endMessageTag1ArgNamePropertyRequestScopeAlternateBundle(WebResponse response){
        formatAndTest(TEST_VAL, response.getText());
    }

}
| apache-2.0 |
inbloom/secure-data-service | sli/data-access/dal/src/main/java/org/slc/sli/ingestion/model/NewBatchJob.java | 9720 | /*
* Copyright 2012-2013 inBloom, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.slc.sli.ingestion.model;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import org.slc.sli.ingestion.BatchJobStageType;
import org.slc.sli.ingestion.FileFormat;
import org.slc.sli.ingestion.FileType;
import org.slc.sli.ingestion.Job;
import org.slc.sli.ingestion.landingzone.IngestionFileEntry;
import org.slc.sli.ingestion.util.BatchJobUtils;
import org.springframework.data.annotation.Id;
import org.springframework.data.mongodb.core.mapping.Document;
/**
* Model for ingestion jobs.
*
* @author dduran
*
*/
@Document
public class NewBatchJob implements Job {

    /** Set when this job object is created (see initStartTime). */
    private Date jobStartTimestamp;

    @Id
    private String id;

    private String tenantId;

    /** Source location of the batch; may point inside an "unzip" working directory. */
    private String sourceId;

    /** sourceId truncated at the first "unzip" segment (see deriveTopLevelSourceId). */
    private String topLevelSourceId;

    private String status;

    private int totalFiles;

    /** Free-form job properties exposed via getProperty/setProperty. */
    private Map<String, String> batchProperties;

    /** Processing stages recorded for this job. */
    private List<Stage> stages;

    /** Resources (files) attached to this job. */
    private List<ResourceEntry> resourceEntries;

    /** Set by stop(); null until the job is stopped. */
    private Date jobStopTimestamp;

    /**
     * Puts the value into the map only when it is non-null.
     * (Generified and made static; the previous signature used raw types.)
     */
    private static <K, V> void putNonNull(final Map<K, V> map, final K key, final V value) {
        if (value != null) {
            map.put(key, value);
        }
    }

    // mongoTemplate requires this constructor.
    public NewBatchJob() {
        this.batchProperties = new HashMap<String, String>();
        this.stages = new LinkedList<Stage>();
        this.resourceEntries = new LinkedList<ResourceEntry>();
        initStartTime();
    }

    public NewBatchJob(String id, String tenantId) {
        // Delegates to the no-arg constructor for collection and start-time setup.
        this();
        this.id = id;
        this.tenantId = tenantId;
    }

    public NewBatchJob(String id, String sourceId, String status, int totalFiles, Map<String, String> batchProperties,
            List<Stage> listOfStages, List<ResourceEntry> resourceEntries) {
        this.id = id;
        this.sourceId = sourceId;
        this.topLevelSourceId = deriveTopLevelSourceId(sourceId);
        this.status = status;
        this.totalFiles = totalFiles;
        // Fall back to empty containers so accessors never see null. (Previously a
        // null batchProperties argument left the field null, causing NPEs in
        // getProperty/setProperty/propertyNames.)
        this.batchProperties = (batchProperties != null) ? batchProperties : new HashMap<String, String>();
        this.stages = (listOfStages != null) ? listOfStages : new LinkedList<Stage>();
        this.resourceEntries = (resourceEntries != null) ? resourceEntries : new LinkedList<ResourceEntry>();
        initStartTime();
    }

    private void initStartTime() {
        jobStartTimestamp = BatchJobUtils.getCurrentTimeStamp();
    }

    /** Records the job stop time. */
    public void stop() {
        jobStopTimestamp = BatchJobUtils.getCurrentTimeStamp();
    }

    /**
     * Generates a new unique ID, prefixed with the file name when one is given.
     *
     * @param filename optional prefix; may be null
     * @return a fresh identifier
     */
    public static String createId(String filename) {
        if (filename == null) {
            return UUID.randomUUID().toString();
        }
        return filename + "-" + UUID.randomUUID().toString();
    }

    @Override
    public String getTenantId() {
        return tenantId;
    }

    public void setTenantId(String tenantId) {
        this.tenantId = tenantId;
    }

    @Override
    public String getId() {
        return id;
    }

    @Override
    public String getProperty(String key) {
        return batchProperties.get(key);
    }

    @Override
    public String getProperty(String key, String defaultValue) {
        String value = batchProperties.get(key);
        return (value != null) ? value : defaultValue;
    }

    @Override
    public Set<String> propertyNames() {
        return batchProperties.keySet();
    }

    @Override
    public void setProperty(String name, String value) {
        batchProperties.put(name, value);
    }

    public String getSourceId() {
        return sourceId;
    }

    public void setSourceId(String sourceId) {
        this.sourceId = sourceId;
    }

    /**
     * Strips everything from the first "unzip" segment onwards, yielding the
     * top-level source path. Returns the input unchanged when "unzip" does not
     * occur; null-safe (previously NPE'd on a null sourceId).
     */
    private String deriveTopLevelSourceId(String sourceId) {
        if (sourceId == null) {
            return null;
        }
        int index = sourceId.indexOf("unzip");
        if (index == -1) {
            return sourceId;
        }
        return sourceId.substring(0, index);
    }

    public String getTopLevelSourceId() {
        return topLevelSourceId;
    }

    public void setTopLevelSourceId(String topLevelSourceId) {
        this.topLevelSourceId = topLevelSourceId;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public Map<String, String> getBatchProperties() {
        return batchProperties;
    }

    public void setBatchProperties(Map<String, String> batchProperties) {
        this.batchProperties = batchProperties;
    }

    /** @return defensive copy of the job start timestamp */
    public Date getJobStartTimestamp() {
        return new Date(jobStartTimestamp.getTime());
    }

    /** @return defensive copy of the stop timestamp, or null if stop() has not been called */
    public Date getJobStopTimestamp() {
        // Previously threw NullPointerException when the job had not been stopped yet.
        return (jobStopTimestamp == null) ? null : new Date(jobStopTimestamp.getTime());
    }

    public List<ResourceEntry> getResourceEntries() {
        return resourceEntries;
    }

    /**
     * Adds a resource entry. Synchronized so concurrent additions are safe;
     * note that reads of resourceEntries elsewhere in this class are not.
     */
    public synchronized void addResourceEntry(ResourceEntry resourceEntry) {
        if (this.resourceEntries == null) {
            this.resourceEntries = new LinkedList<ResourceEntry>();
        }
        this.resourceEntries.add(resourceEntry);
    }

    /**
     * Returns the ResourceEntry for a given resourceId, or null if no matching
     * entry is found.
     *
     * @param resourceId id to look up; must not be null
     * @return the matching entry, or null
     * @throws IllegalArgumentException if resourceId is null
     */
    public ResourceEntry getResourceEntry(String resourceId) {
        if (resourceId == null) {
            throw new IllegalArgumentException("Cannot get resource for null resourceId");
        }
        for (ResourceEntry entry : this.getResourceEntries()) {
            if (resourceId.equals(entry.getResourceId())) {
                return entry;
            }
        }
        return null;
    }

    /**
     * Returns all neutral-record resources whose type matches the given file
     * type; empty list when fileType is null or nothing matches.
     */
    public List<ResourceEntry> getNeutralRecordResourceForType(FileType fileType) {
        List<ResourceEntry> nrResourcesForType = new ArrayList<ResourceEntry>();
        if (fileType != null) {
            for (ResourceEntry entry : this.getResourceEntries()) {
                if (FileFormat.NEUTRALRECORD.getCode().equals(entry.getResourceFormat())
                        && fileType.getName().equals(entry.getResourceType())) {
                    nrResourcesForType.add(entry);
                }
            }
        }
        return nrResourcesForType;
    }

    /**
     * Returns the metrics collected by all stages of the given type, or an
     * empty list when none recorded metrics. (The previous javadoc incorrectly
     * claimed null was returned; the implementation never returns null.)
     *
     * @param stageType stage type to collect metrics for
     * @return metrics for that stage type, possibly empty
     */
    public List<Metrics> getStageMetrics(BatchJobStageType stageType) {
        List<Metrics> m = new LinkedList<Metrics>();
        for (Stage s : this.stages) {
            if (stageType.getName().equals(s.getStageName())) {
                m.addAll(s.getMetrics());
            }
        }
        if (!m.isEmpty()) {
            return m;
        }
        return Collections.emptyList();
    }

    /**
     * Adds a stage to this NewBatchJob instance.
     *
     * @param stage stage to record
     */
    public void addStage(Stage stage) {
        this.stages.add(stage);
    }

    /**
     * Builds IngestionFileEntry items from every resource entry whose format
     * and type are both recognised; unrecognised entries are skipped.
     */
    @Override
    public List<IngestionFileEntry> getFiles() {
        List<IngestionFileEntry> ingestionFileEntries = new ArrayList<IngestionFileEntry>();
        for (ResourceEntry resourceEntry : resourceEntries) {
            FileFormat fileFormat = FileFormat.findByCode(resourceEntry.getResourceFormat());
            if (fileFormat == null || resourceEntry.getResourceType() == null) {
                continue;
            }
            FileType fileType = FileType.findByNameAndFormat(resourceEntry.getResourceType(), fileFormat);
            if (fileType == null) {
                continue;
            }
            IngestionFileEntry ingestionFileEntry = new IngestionFileEntry(resourceEntry.getResourceZipParent(), fileFormat, fileType,
                    resourceEntry.getResourceId(), resourceEntry.getChecksum());
            ingestionFileEntry.setBatchJobId(id);
            ingestionFileEntries.add(ingestionFileEntry);
        }
        return ingestionFileEntries;
    }

    /**
     * @return the first resource entry whose format is a zip file, or null
     */
    public ResourceEntry getZipResourceEntry() {
        for (ResourceEntry resourceEntry : resourceEntries) {
            if (FileFormat.ZIP_FILE.getCode().equalsIgnoreCase(resourceEntry.getResourceFormat())) {
                return resourceEntry;
            }
        }
        return null;
    }
}
| apache-2.0 |
MobileTribe/pandroid | pandroid-annotations/src/main/java/com/leroymerlin/pandroid/annotations/BindLifeCycleDelegate.java | 249 | package com.leroymerlin.pandroid.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Target;
@Target(ElementType.FIELD)
public @interface BindLifeCycleDelegate {
String BINDER_PREFIX = "_LifecycleAutoBinder";
}
| apache-2.0 |
nickpan47/samza | samza-kafka/src/test/java/org/apache/samza/system/kafka/TestKafkaSystemAdminJava.java | 5993 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.system.kafka;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.samza.system.StreamSpec;
import org.apache.samza.system.StreamValidationException;
import org.apache.samza.system.SystemAdmin;
import org.apache.samza.util.Util;
import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import static org.junit.Assert.*;
public class TestKafkaSystemAdminJava extends TestKafkaSystemAdmin {

  // Admin built with the scala test base's default setup; shared by the tests
  // that need no changelog/coordinator customisation.
  KafkaSystemAdmin basicSystemAdmin = createSystemAdmin();

  @Test
  public void testCreateCoordinatorStreamDelegatesToCreateStream() {
    KafkaSystemAdmin systemAdmin = createSystemAdmin();//coordProps, 3, new scala.collection.immutable.HashMap<>(), 1000);
    SystemAdmin admin = Mockito.spy(systemAdmin);
    StreamSpec spec = new StreamSpec("testId", "testCoordinatorStream", "testSystem");
    admin.createCoordinatorStream(spec.getPhysicalName());
    admin.validateStream(spec);
    // Coordinator-stream creation must route through the generic createStream().
    Mockito.verify(admin).createStream(Mockito.any());
  }

  @Test
  public void testCreateChangelogStreamDelegatesToCreateStream() {
    final String STREAM = "testChangeLogStream";
    final int PARTITIONS = 12;
    final int REP_FACTOR = 3;
    Properties coordProps = new Properties();
    Properties changeLogProps = new Properties();
    changeLogProps.setProperty("cleanup.policy", "compact");
    changeLogProps.setProperty("segment.bytes", "139");
    Map<String, ChangelogInfo> changeLogMap = new HashMap<>();
    changeLogMap.put(STREAM, new ChangelogInfo(REP_FACTOR, changeLogProps));
    SystemAdmin admin = Mockito.spy(createSystemAdmin(coordProps, 3, Util.javaMapAsScalaMap(changeLogMap)));
    StreamSpec spec = new StreamSpec(STREAM, STREAM, SYSTEM(), PARTITIONS);
    admin.createChangelogStream(STREAM, PARTITIONS);
    admin.validateStream(spec);
    // Capture the spec handed to createStream and check the changelog config
    // (replication factor, partitions, topic properties) survived the translation.
    ArgumentCaptor<StreamSpec> specCaptor = ArgumentCaptor.forClass(StreamSpec.class);
    Mockito.verify(admin).createStream(specCaptor.capture());
    StreamSpec internalSpec = specCaptor.getValue();
    assertTrue(internalSpec instanceof KafkaStreamSpec); // KafkaStreamSpec is used to carry replication factor
    assertEquals(STREAM, internalSpec.getId());
    assertEquals(SYSTEM(), internalSpec.getSystemName());
    assertEquals(STREAM, internalSpec.getPhysicalName());
    assertEquals(REP_FACTOR, ((KafkaStreamSpec) internalSpec).getReplicationFactor());
    assertEquals(PARTITIONS, internalSpec.getPartitionCount());
    assertEquals(changeLogProps, ((KafkaStreamSpec) internalSpec).getProperties());
  }

  @Test
  public void testValidateChangelogStreamDelegatesToValidateStream() {
    final String STREAM = "testChangeLogValidate";
    Properties coordProps = new Properties();
    Map<String, ChangelogInfo> changeLogMap = new HashMap<>();
    changeLogMap.put(STREAM, new ChangelogInfo(3, new Properties()));
    KafkaSystemAdmin systemAdmin = createSystemAdmin(coordProps, 3, Util.javaMapAsScalaMap(changeLogMap));
    SystemAdmin admin = Mockito.spy(systemAdmin);
    StreamSpec spec = new StreamSpec("testId", STREAM, "testSystem", 12);
    admin.createChangelogStream(spec.getPhysicalName(), spec.getPartitionCount());
    admin.validateStream(spec);
    admin.validateChangelogStream(spec.getPhysicalName(), spec.getPartitionCount());
    Mockito.verify(admin).createStream(Mockito.any());
    // validateStream runs once directly, once inside createChangelogStream's
    // validation, and once via validateChangelogStream — three times in total.
    Mockito.verify(admin, Mockito.times(3)).validateStream(Mockito.any());
  }

  @Test
  public void testCreateStream() {
    SystemAdmin admin = this.basicSystemAdmin;
    StreamSpec spec = new StreamSpec("testId", "testStream", "testSystem", 8);
    assertTrue("createStream should return true if the stream does not exist and then is created.", admin.createStream(spec));
    admin.validateStream(spec);
    assertFalse("createStream should return false if the stream already exists.", admin.createStream(spec));
  }

  @Test(expected = StreamValidationException.class)
  public void testValidateStreamDoesNotExist() {
    SystemAdmin admin = this.basicSystemAdmin;
    // Never created, so validation must fail.
    StreamSpec spec = new StreamSpec("testId", "testStreamNameExist", "testSystem", 8);
    admin.validateStream(spec);
  }

  @Test(expected = StreamValidationException.class)
  public void testValidateStreamWrongPartitionCount() {
    SystemAdmin admin = this.basicSystemAdmin;
    // Same stream name, different partition count — validation must fail.
    StreamSpec spec1 = new StreamSpec("testId", "testStreamPartition", "testSystem", 8);
    StreamSpec spec2 = new StreamSpec("testId", "testStreamPartition", "testSystem", 4);
    assertTrue("createStream should return true if the stream does not exist and then is created.", admin.createStream(spec1));
    admin.validateStream(spec2);
  }

  @Test(expected = StreamValidationException.class)
  public void testValidateStreamWrongName() {
    SystemAdmin admin = this.basicSystemAdmin;
    // Validating a name that was never created must fail.
    StreamSpec spec1 = new StreamSpec("testId", "testStreamName1", "testSystem", 8);
    StreamSpec spec2 = new StreamSpec("testId", "testStreamName2", "testSystem", 8);
    assertTrue("createStream should return true if the stream does not exist and then is created.", admin.createStream(spec1));
    admin.validateStream(spec2);
  }
}
| apache-2.0 |
ccliu2015/love | app/src/main/java/com/wisedu/scc/love/widget/audio/IMediaManager.java | 1441 | package com.wisedu.scc.love.widget.audio;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
/**
* Interface containing media management methods.
*/
public interface IMediaManager {

    /**
     * Records a file.
     *
     * @param fileName destination file for the greeting recording
     */
    void recordGreeting(String fileName);

    /**
     * Plays saved greeting.
     *
     * @param fileName file to play
     * @param isRestartRequired whether playback should restart from the beginning
     */
    void playGreeting(String fileName, boolean isRestartRequired);

    /**
     * Stops recording process.
     */
    void stopRecording();

    /**
     * Stops greeting playback process.
     */
    void stopPlayback();

    /**
     * Pauses greeting playback.
     */
    void pausePlayback();

    /**
     * Gets playback duration.
     *
     * @return ms of the file to be played.
     */
    int getPlaybackDuration();

    /**
     * Current position of greeting being played.
     *
     * @return the current position in milliseconds
     */
    int getCurrentPlaybackPosition();

    /**
     * Sets current playback position.
     *
     * @param progress new playback position (presumably milliseconds, matching
     *                 getCurrentPlaybackPosition() — TODO confirm in implementations)
     */
    void setPlayPosition(int progress);

    /**
     * Gets current media player object.
     *
     * @return MediaPlayer
     */
    // NOTE: previous javadoc incorrectly said "@return MediaRecorder".
    MediaPlayer getMediaPlayer();

    /**
     * Gets current MediaRecorder object.
     *
     * @return MediaRecorder
     */
    MediaRecorder getMediaRecorder();

    /**
     * @return whether playback is currently in progress
     */
    boolean isPlaying();

    /**
     * Registers the listener notified of media events.
     *
     * @param onMediaEventListener callback receiver
     */
    void setOnMediaEventListener(OnMediaEventListener onMediaEventListener);

    /** Callbacks for recording completion and play/record failures. */
    public static interface OnMediaEventListener {

        /** Invoked when greeting recording completes. */
        public void onMediaRecordCompletion();

        /** Invoked when greeting playback fails. */
        public void onMediaPlayError();

        /** Invoked when greeting recording fails. */
        public void onMediaRecordError();
    }
}
| apache-2.0 |
mbe24/jcurry | src/main/java/org/beyene/jcurry/function/Function4.java | 1358 | /*
* Copyright 2014 Mikael Beyene
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.beyene.jcurry.function;
import org.beyene.jcurry.function.wrap.CommonExecutable;
/**
 * A curried function of four arguments. Binding the {@code P4} argument via
 * {@link #p4(Object)} yields a {@link Function3} awaiting the remaining three.
 *
 * @param <T>  result type of the underlying executable
 * @param <E>  exception type thrown by the underlying executable
 * @param <P1> first parameter type
 * @param <P2> second parameter type
 * @param <P3> third parameter type
 * @param <P4> fourth parameter type, consumed by this stage
 */
public final class Function4<T, E extends Exception, P1, P2, P3, P4> extends
        AbstractFunction<P4, Function3<T, E, P1, P2, P3>, T, E> {

    // Internal currying constructor — presumably carries the arguments bound
    // so far in `args`; see AbstractFunction (TODO confirm).
    protected Function4(CommonExecutable<T, E> executable,
            Object[] args) {
        super(executable, args);
    }

    /** Wraps a 4-argument executable; the `4` is the arity passed to the base class. */
    public Function4(CommonExecutable<T, E> executable) {
        super(executable, 4);
    }

    /** Binds the fourth argument, returning the remaining 3-argument function. */
    public Function3<T, E, P1, P2, P3> p4(P4 t) {
        return apply(t);
    }

    /** Builds the next currying stage once this stage's argument is bound. */
    @Override
    protected Function3<T, E, P1, P2, P3> lof(CommonExecutable<T, E> function, Object[] parameters) {
        return new Function3<>(function, parameters);
    }

    /** Zero-based slot in the argument array that p4 fills. */
    @Override
    protected int argPos() {
        return 3;
    }
} | apache-2.0 |
ST-DDT/CrazyCore | src/main/java/de/st_ddt/crazyplugin/comparator/CrazyPluginNameComparator.java | 318 | package de.st_ddt.crazyplugin.comparator;
import de.st_ddt.crazyplugin.CrazyPluginInterface;
/**
 * Orders plugins alphabetically (case-sensitive) by their declared name.
 */
public class CrazyPluginNameComparator implements CrazyPluginComparator
{

	@Override
	public int compare(final CrazyPluginInterface first, final CrazyPluginInterface second)
	{
		final String leftName = first.getName();
		final String rightName = second.getName();
		return leftName.compareTo(rightName);
	}
}
| apache-2.0 |
bushuyev/niomongo | src/test/java/com/niomongo/conversion/json2bson/Catalog.java | 987 | package com.niomongo.conversion.json2bson;
import com.niomongo.conversion.custom.ClassMeta;
import com.niomongo.conversion.custom.Meta;
import com.niomongo.conversion.custom.Size;
import com.niomongo.conversion.custom.Strict;
import com.niomongo.conversion.json.NameLiteralValue;
import com.niomongo.conversion.json.NameObjectValue;
import com.niomongo.conversion.json.NameStringValue;
import com.niomongo.processing.common.UsersObject;
import java.nio.ByteBuffer;
/**
* Created with IntelliJ IDEA.
* User: Yevgen Bushuyev
* Date: 12/25/14
* Time: 4:27 PM
*/
@Strict
public class Catalog extends UsersObject {

    /** Reflection metadata for this class, resolved once at class load. */
    public static final ClassMeta meta = ClassMeta.findMeta(Catalog.class);

    public Catalog() {
    }

    /** Deserializing constructor: the superclass populates fields from the buffer. */
    public Catalog(ByteBuffer buffer) {
        super(buffer);
    }

    /** Catalog name; @Size constrains it to 5..100 (presumably characters — TODO confirm). */
    @Meta(String.class)
    @Size(min = 5, max = 100)
    public NameStringValue name;

    /** Boolean readiness flag (per the @Meta(Boolean.class) declaration). */
    @Meta(Boolean.class)
    public NameLiteralValue ready;
//
//    @Meta(Branch.class)
//    public NameObjectValue rootBranch;
}
| apache-2.0 |
alexxiyang/jdbctemplatetool | src/main/java/org/crazycake/jdbcTemplateTool/exception/NoDefinedGetterException.java | 420 | package org.crazycake.jdbcTemplateTool.exception;
/**
 * Thrown when a mapped bean property has no corresponding getter method, so
 * its value cannot be read.
 */
public class NoDefinedGetterException extends Exception {

    // Exception is Serializable; declare an explicit serialVersionUID so the
    // serialized form stays stable across recompiles.
    private static final long serialVersionUID = 1L;

    /** Name of the property that is missing a getter. */
    private String fieldName;

    /**
     * Creates the exception for the given property.
     *
     * @param fieldName name of the property lacking a getter
     */
    public NoDefinedGetterException(String fieldName) {
        // Fixed message grammar ("an getter" -> "a getter").
        super(fieldName + " should have a getter method.");
        this.fieldName = fieldName;
    }

    /**
     * @return the property name that is missing a getter
     */
    public String getFieldName() {
        return fieldName;
    }

    /**
     * @param fieldName the property name to report
     */
    public void setFieldName(String fieldName) {
        this.fieldName = fieldName;
    }
}
| apache-2.0 |
Haldir65/AndroidRepo | _001_socket_transportation/app/src/main/java/com/me/harris/socketio/MainActivity.java | 2736 | package com.me.harris.socketio;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
/**
 * Demo screen for the socket-transport sample: a text field plus two buttons
 * that send the typed text over (or tear down) a persistent TCP connection
 * managed by {@link TCPClient}.
 */
public class MainActivity extends AppCompatActivity implements View.OnClickListener {

    // "Send" button: transmits the current EditText contents over the socket.
    Button mBtnSend;
    // "Disconnect" button: sends "bye" to ask the server to close the session.
    Button mBtnDisconnect;
    // User input to be sent over the socket.
    EditText mEditText;
    // Long-lived TCP client; connected once in onCreate().
    TCPClient mClient;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Layout must be inflated before any findViewById call below.
        setContentView(R.layout.activity_main);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
        mBtnSend = findViewById(R.id.button);
        mBtnDisconnect = findViewById(R.id.button2);
        mEditText = findViewById(R.id.editText);
        // Both buttons route through this activity's onClick(View).
        mBtnDisconnect.setOnClickListener(this);
        mBtnSend.setOnClickListener(this);
        fab.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
                // .setAction("Action", null).show();
            }
        });
        mClient = new TCPClient();
        mClient.init(); // keep a single TCP connection alive for this screen
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    // Shared click handler for the send and disconnect buttons.
    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.button:
                sendMessage();
                break;
            case R.id.button2:
                disconnect();
                break;
            default:
                break;
        }
    }

    // Asks the peer to end the session; "bye" is the protocol's close marker.
    private void disconnect() {
        mClient.sendMsg("bye");
    }

    // Sends whatever the user has typed; no validation or clearing is done here.
    private void sendMessage() {
        mClient.sendMsg(mEditText.getText().toString());
    }
}
| apache-2.0 |
kanta12937/mywork | TripList/src/com/ftfl/triplist/database/PlaceListDataSource.java | 6221 | package com.ftfl.triplist.database;
import java.util.ArrayList;
import com.ftfl.triplist.modelclass.*;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.SQLException;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
/**
 * SQLite data-access object for trip "place" records.
 *
 * Each public operation opens a writable database handle, performs its work,
 * and closes the handle again, so instances hold no open connection between
 * calls.
 */
public class PlaceListDataSource {

    private SQLiteDatabase db;
    private DBHelper dbHelper;

    public PlaceListDataSource(Context context) {
        dbHelper = new DBHelper(context);
    }

    /** Opens a writable database handle; pair with {@link #close()}. */
    public void open() throws SQLException {
        db = dbHelper.getWritableDatabase();
    }

    /** Releases the database handle via the helper. */
    public void close() {
        dbHelper.close();
    }

    /**
     * Inserts a new place row.
     *
     * @param place values to persist (id is assigned by the database)
     * @return the row id of the newly inserted row, or -1 if the insert failed
     */
    public long addNewPlace(PlaceModelClass place) {
        open();
        ContentValues values = new ContentValues();
        values.put(DBHelper.KEY_NAME, place.getmName());
        values.put(DBHelper.KEY_PURPOSE, place.getmPurpose());
        values.put(DBHelper.KEY_ADDRESS, place.getmAddress());
        values.put(DBHelper.KEY_DISTRICT, place.getmDistrict());
        values.put(DBHelper.KEY_LATITUDE, place.getmLatitude());
        values.put(DBHelper.KEY_LOGITUDE, place.getmLongitude());
        values.put(DBHelper.KEY_START_DATE, place.getmStartDate());
        values.put(DBHelper.KEY_END_DATE, place.getmEndDate());
        values.put(DBHelper.KEY_NOTE, place.getmNote());
        values.put(DBHelper.KEY_PHOTO, place.getmImage());
        long inserted = db.insert(DBHelper.TABLE_NAME, null, values);
        close();
        return inserted;
    }

    /**
     * Deletes the row with the given id.
     *
     * @return false only if the delete threw; true otherwise (including when
     *         no row matched)
     */
    public boolean deleteData(Integer eId) {
        this.open();
        try {
            db.delete(DBHelper.TABLE_NAME, DBHelper.KEY_ID + "=" + eId, null);
        } catch (Exception ex) {
            Log.e("ERROR", "data not deleted");
            return false;
        }
        this.close();
        return true;
    }

    /**
     * Updates the row with the given id.
     *
     * NOTE(review): KEY_PHOTO is not written here, so the stored image is left
     * untouched on update -- confirm this is intentional.
     *
     * @return number of rows affected, or 0 if the update threw
     */
    public long updateData(Integer id, PlaceModelClass place) {
        open();
        ContentValues values = new ContentValues();
        values.put(DBHelper.KEY_NAME, place.getmName());
        values.put(DBHelper.KEY_PURPOSE, place.getmPurpose());
        values.put(DBHelper.KEY_ADDRESS, place.getmAddress());
        values.put(DBHelper.KEY_DISTRICT, place.getmDistrict());
        values.put(DBHelper.KEY_LATITUDE, place.getmLatitude());
        values.put(DBHelper.KEY_LOGITUDE, place.getmLongitude());
        values.put(DBHelper.KEY_START_DATE, place.getmStartDate());
        values.put(DBHelper.KEY_END_DATE, place.getmEndDate());
        values.put(DBHelper.KEY_NOTE, place.getmNote());
        long updated = 0;
        try {
            updated = db.update(DBHelper.TABLE_NAME, values, DBHelper.KEY_ID
                    + "=" + id, null);
        } catch (Exception ex) {
            Log.e("ERROR", "data upgraion problem");
        }
        close();
        return updated;
    }

    /**
     * Loads all place rows.
     *
     * @return every row mapped to a model object; empty list if the table is empty
     */
    public ArrayList<PlaceModelClass> getPlaceList() {
        ArrayList<PlaceModelClass> place_list = new ArrayList<PlaceModelClass>();
        open();
        Cursor cursor = db.query(DBHelper.TABLE_NAME, null, null, null, null,
                null, null);
        // Loop through all rows, mapping each to a PlaceModelClass.
        if (cursor != null && cursor.getCount() > 0) {
            cursor.moveToFirst();
            for (int i = 0; i < cursor.getCount(); i++) {
                int id = cursor.getInt(cursor.getColumnIndex(DBHelper.KEY_ID));
                String name = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_NAME));
                String purpose = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_PURPOSE));
                String address = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_ADDRESS));
                String district = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_DISTRICT));
                String latitude = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_LATITUDE));
                String longitude = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_LOGITUDE));
                String startDate = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_START_DATE));
                String endDate = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_END_DATE));
                String note = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_NOTE));
                String photo = cursor.getString(cursor
                        .getColumnIndex(DBHelper.KEY_PHOTO));
                PlaceModelClass place = new PlaceModelClass(id, name, purpose,
                        address, district, latitude, longitude, startDate,
                        endDate, note, photo);
                place_list.add(place);
                cursor.moveToNext();
            }
        }
        if (cursor != null) {
            cursor.close();
        }
        db.close();
        return place_list;
    }

    /**
     * Loads a single place row by id.
     *
     * NOTE(review): assumes the id exists; an unknown id makes the cursor reads
     * throw. Callers should only pass ids obtained from {@link #getPlaceList()}.
     */
    public PlaceModelClass getDetail(int id) {
        PlaceModelClass place_detail;
        open();
        String selectQuery = "SELECT * FROM " + DBHelper.TABLE_NAME
                + " WHERE " + DBHelper.KEY_ID + "=" + id;
        Cursor cursor = db.rawQuery(selectQuery, null);
        cursor.moveToFirst();
        String name = cursor
                .getString(cursor.getColumnIndex(DBHelper.KEY_NAME));
        String purpose = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_PURPOSE));
        String address = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_ADDRESS));
        String district = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_DISTRICT));
        String latitude = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_LATITUDE));
        String longitude = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_LOGITUDE));
        String startDate = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_START_DATE));
        String endDate = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_END_DATE));
        String note = cursor
                .getString(cursor.getColumnIndex(DBHelper.KEY_NOTE));
        String photo = cursor.getString(cursor
                .getColumnIndex(DBHelper.KEY_PHOTO));
        place_detail = new PlaceModelClass(id, name, purpose, address,
                district, latitude, longitude, startDate, endDate, note, photo);
        cursor.close();
        db.close();
        return place_detail;
    }

    /**
     * @return true if the place table contains no rows.
     *
     * Bug fix: the original never closed its Cursor, leaking it on every call;
     * the cursor is now released in a finally block.
     */
    public boolean isEmpty() {
        this.open();
        Cursor cursor = db.query(DBHelper.TABLE_NAME, new String[] {
                DBHelper.KEY_ID, DBHelper.KEY_NAME, DBHelper.KEY_PURPOSE,
                DBHelper.KEY_ADDRESS, DBHelper.KEY_DISTRICT,
                DBHelper.KEY_LATITUDE, DBHelper.KEY_LOGITUDE,
                DBHelper.KEY_START_DATE, DBHelper.KEY_END_DATE,
                DBHelper.KEY_NOTE, DBHelper.KEY_PHOTO, }, null, null, null,
                null, null);
        try {
            return cursor.getCount() == 0;
        } finally {
            cursor.close();
            this.close();
        }
    }
}
| apache-2.0 |
igarashitm/switchyard | core/admin/src/main/java/org/switchyard/admin/base/SwitchYardBuilder.java | 6206 | /*
* Copyright 2013 Red Hat Inc. and/or its affiliates and other contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.switchyard.admin.base;
import java.util.EventObject;
import javax.xml.namespace.QName;
import org.switchyard.Exchange;
import org.switchyard.admin.Application;
import org.switchyard.admin.ComponentReference;
import org.switchyard.admin.Reference;
import org.switchyard.admin.Service;
import org.switchyard.admin.SwitchYard;
import org.switchyard.admin.mbean.internal.LocalManagement;
import org.switchyard.admin.mbean.internal.MBeans;
import org.switchyard.deploy.ComponentNames;
import org.switchyard.deploy.ServiceDomainManager;
import org.switchyard.deploy.event.ApplicationDeployedEvent;
import org.switchyard.deploy.event.ApplicationUndeployedEvent;
import org.switchyard.deploy.internal.AbstractDeployment;
import org.switchyard.event.EventObserver;
import org.switchyard.runtime.event.ExchangeCompletionEvent;
/**
* SwitchYardBuilder
*
* {@link EventObserver} implementation which builds a
* {@link org.switchyard.admin.SwitchYard} model using notifications from
* {@link AbstractDeployment} events.
*
* @author Rob Cernich
*/
public class SwitchYardBuilder implements EventObserver {

    // Root of the admin model; populated/depopulated as applications deploy.
    private BaseSwitchYard _switchYard;
    // Source of the event manager this builder subscribes to; set in init().
    private ServiceDomainManager _domainManager;

    /**
     * Create a new SwitchYardBuilder.
     */
    public SwitchYardBuilder() {
        _switchYard = new BaseSwitchYard();
    }

    /**
     * Initializes the SwitchBuilder which includes registering the local management MBean
     * and registering as an EventObserver within SwitchYard.
     * @param domainManager the SY ServiceDomainManager
     */
    public void init(ServiceDomainManager domainManager) {
        _domainManager = domainManager;
        // Register local management MBeans
        LocalManagement lm = new LocalManagement(_domainManager);
        MBeans.registerLocalManagement(lm);
        // Register event hooks
        _domainManager.getEventManager()
                .addObserver(this, ExchangeCompletionEvent.class)
                .addObserver(this, ApplicationDeployedEvent.class)
                .addObserver(this, ApplicationUndeployedEvent.class);
    }

    /**
     * Tears down registered MBeans and event subscriptions. Call this during system shutdown
     * to clean up.
     */
    public void destroy() {
        // Unregister event hooks
        _domainManager.getEventManager().removeObserver(this);
        // Unregister management mbeans
        MBeans.unregisterLocalManagement();
    }

    /**
     * Returns the SwitchYard admin object.
     * @return SwitchYard interface representing the SY runtime
     */
    public SwitchYard getSwitchYard() {
        return _switchYard;
    }

    /**
     * Returns the ServiceDomainManager instance in use for this builder.
     * @return ServiceDomainManager used by this builder instance.
     */
    public ServiceDomainManager getDomainManager() {
        return _domainManager;
    }

    // Dispatches the three subscribed event types; anything else is ignored.
    @Override
    public void notify(EventObject event) {
        if (event instanceof ApplicationDeployedEvent) {
            applicationDeployed((ApplicationDeployedEvent)event);
        } else if (event instanceof ApplicationUndeployedEvent) {
            applicationUndeployed((ApplicationUndeployedEvent)event);
        } else if (event instanceof ExchangeCompletionEvent) {
            exchangeCompleted((ExchangeCompletionEvent)event);
        }
    }

    // Adds the deployed application to the admin model and exposes it as an MBean.
    // Deployments without a name are skipped.
    void applicationDeployed(ApplicationDeployedEvent event) {
        AbstractDeployment deployment = event.getDeployment();
        if (deployment.getName() != null) {
            BaseApplication app = new BaseApplication(deployment);
            _switchYard.addApplication(app);
            MBeans.registerApplication(app);
        }
    }

    // Reverse of applicationDeployed: unregisters the MBean, then drops the app.
    void applicationUndeployed(ApplicationUndeployedEvent event) {
        AbstractDeployment deployment = event.getDeployment();
        if (deployment.getName() != null) {
            Application app = _switchYard.getApplication(deployment.getName());
            if (app != null) {
                MBeans.unregisterApplication(app);
                _switchYard.removeApplication(deployment.getName());
            }
        }
    }

    void exchangeCompleted(ExchangeCompletionEvent event) {
        // Recording metrics at multiple levels at this point instead of
        // aggregating them.
        Exchange exchange = event.getExchange();
        QName serviceName = exchange.getProvider().getName();
        QName referenceName = ComponentNames.unqualify(exchange.getConsumer().getName());
        for (Service service : _switchYard.getServices()) {
            if (service.getName().equals(serviceName)) {
                // 1 - the aggregate switchyard stats
                _switchYard.recordMetrics(exchange);
                // 2 - service stats
                service.recordMetrics(exchange);
            }
            // 3 - reference stats
            // XXX: this looks like it lumps the stats into every component reference with a matching name
            for (ComponentReference reference : service.getPromotedService().getReferences()) {
                if (reference.getName().equals(referenceName)) {
                    ((BaseComponentReference)reference).recordMetrics(exchange);
                }
            }
        }
        // 4 - reference stats
        for (Reference reference : _switchYard.getReferences()) {
            if (reference.getName().equals(referenceName)) {
                reference.recordMetrics(exchange);
                break;
            }
        }
    }
}
| apache-2.0 |
greenbird/mule-access-log | src/main/java/com/greenbird/mule/http/log/converter/AbstractAccessLogConverter.java | 3832 | /*
* Copyright 2013 greenbird Integration Technology
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.greenbird.mule.http.log.converter;
import com.greenbird.mule.http.log.RequestPropertiesRetainer;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.pattern.LogEventPatternConverter;
import org.apache.logging.log4j.message.ObjectMessage;
import org.mule.api.MuleMessage;
import java.util.Date;
import java.util.Map;
/**
 * Base class for log4j2 pattern converters that extract one value from the
 * MuleMessage carried inside an access-log event.
 *
 * The single converter option (options[0]) follows this mini-grammar:
 *   ";qt"           - quote the converted value (no default)
 *   "default;qt"    - quote the value; emit "default" when the value is absent
 *   "default"       - emit "default" when the value is absent (no quoting)
 * With no option, absent values render as the empty string.
 */
public abstract class AbstractAccessLogConverter extends LogEventPatternConverter {
    private static final String QUOTE_OPTION = ";qt";
    // Emitted when the converted value is null; never quoted.
    private String defaultValue = "";
    // True when the (non-null) converted value should be wrapped in double quotes.
    private boolean quote;

    protected AbstractAccessLogConverter(String name, String[] options) {
        super(name, name);
        if (options != null && options.length > 0) {
            String option = options[0];
            if (option.equals(QUOTE_OPTION)) {
                quote = true;
            } else if (option.endsWith(QUOTE_OPTION)) {
                // Strip the trailing ";qt" suffix; the remainder is the default.
                quote = true;
                defaultValue = option.replaceFirst("(.*)" + QUOTE_OPTION, "$1");
            } else {
                defaultValue = option;
            }
        }
    }

    // Expects the log event's message to wrap a MuleMessage as its first
    // parameter; anything else is reported and rendered as the default value.
    @Override
    public void format(LogEvent logEvent, StringBuilder stringBuilder) {
        if (logEvent.getMessage() instanceof ObjectMessage
                && logEvent.getMessage().getParameters() != null
                && logEvent.getMessage().getParameters().length > 0
                && logEvent.getMessage().getParameters()[0] != null
                && logEvent.getMessage().getParameters()[0] instanceof MuleMessage) {
            stringBuilder.append(formatResult(doConvert((MuleMessage) logEvent.getMessage().getParameters()[0])));
        } else {
            notifyError(String.format("Got log message '%s', but expected a message of type MuleMessage.", logEvent.getMessage()));
            stringBuilder.append(formatResult(null));
        }
    }

    /**
     * Notify about an error situation without using the logger API. Using the logger API might lead to eternal recursive
     * logging if the http log category is not properly configured to be logged separately from the other log categories.
     *
     * @param message the error message to log.
     */
    protected void notifyError(String message) {
        System.out.println(String.format("ERROR %s %s %s", new Date(), getClass().getName(), message));
    }

    // Applies the quote/default rules described in the class javadoc.
    protected String formatResult(String value) {
        String result;
        if (value != null && quote) {
            result = String.format("\"%s\"", value);
        } else {
            result = value;
        }
        return result != null ? result : defaultValue;
    }

    // Looks the property up in the retained initial-request snapshot first
    // (properties captured before the flow mutated the message), falling back
    // to the live inbound scope.
    @SuppressWarnings("unchecked")
    protected <T> T getProperty(String propertyName, MuleMessage message) {
        Object value = null;
        Map<String, Object> retainedProperties =
                message.getInvocationProperty(RequestPropertiesRetainer.INITIAL_REQUEST_PROPERTY);
        if (retainedProperties != null) {
            value = retainedProperties.get(propertyName);
        }
        if (value == null) {
            value = message.getInboundProperty(propertyName);
        }
        return (T) value;
    }

    /** Extracts this converter's value from the message; null means "absent". */
    protected abstract String doConvert(MuleMessage message);
}
| apache-2.0 |
torakiki/sambox | src/main/java/org/sejda/sambox/pdmodel/graphics/image/JPEGFactory.java | 11619 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sejda.sambox.pdmodel.graphics.image;
import static java.util.Objects.nonNull;
import static org.sejda.commons.util.RequireUtils.requireIOCondition;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.color.ICC_ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.awt.image.WritableRaster;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Iterator;
import javax.imageio.IIOImage;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.ImageTypeSpecifier;
import javax.imageio.ImageWriteParam;
import javax.imageio.ImageWriter;
import javax.imageio.metadata.IIOMetadata;
import javax.imageio.stream.ImageInputStream;
import javax.imageio.stream.ImageOutputStream;
import org.sejda.commons.FastByteArrayOutputStream;
import org.sejda.commons.util.IOUtils;
import org.sejda.io.SeekableSource;
import org.sejda.io.SeekableSources;
import org.sejda.sambox.cos.COSName;
import org.sejda.sambox.pdmodel.graphics.color.PDColorSpace;
import org.sejda.sambox.pdmodel.graphics.color.PDDeviceCMYK;
import org.sejda.sambox.pdmodel.graphics.color.PDDeviceGray;
import org.sejda.sambox.pdmodel.graphics.color.PDDeviceRGB;
import org.w3c.dom.Element;
/**
* Factory for creating a PDImageXObject containing a JPEG compressed image.
*
* @author John Hewson
*/
/**
 * Factory for creating a PDImageXObject containing a JPEG compressed image.
 *
 * @author John Hewson
 */
public final class JPEGFactory
{
    // Utility class: no instances.
    private JPEGFactory()
    {
    }

    /**
     * Creates a new JPEG Image XObject from an input stream containing JPEG data.
     *
     * The input stream data will be preserved and embedded in the PDF file without modification.
     *
     * @param file a JPEG file
     * @return a new Image XObject
     *
     * @throws IOException if the input stream cannot be read
     */
    public static PDImageXObject createFromFile(File file) throws IOException
    {
        try (SeekableSource source = SeekableSources.seekableSourceFrom(file))
        {
            return createFromSeekableSource(source);
        }
    }

    /**
     * Creates a JPEG Image XObject from a seekable source, embedding the JPEG
     * bytes verbatim (the image is decoded only to read its dimensions, bit
     * depth and color space).
     *
     * @param source source holding raw JPEG data
     * @return a new Image XObject
     * @throws IOException if the JPEG cannot be read or decoded
     * @throws UnsupportedOperationException if the image has an alpha channel
     */
    public static PDImageXObject createFromSeekableSource(SeekableSource source) throws IOException
    {
        // read image
        BufferedImage awtImage = readJpeg(source.asNewInputStream());
        // create Image XObject from stream
        PDImageXObject pdImage = new PDImageXObject(
                new BufferedInputStream(source.asNewInputStream()), COSName.DCT_DECODE,
                awtImage.getWidth(), awtImage.getHeight(),
                awtImage.getColorModel().getComponentSize(0), getColorSpaceFromAWT(awtImage));
        // no alpha
        // NOTE(review): this check happens after the XObject is constructed; it
        // could be performed first, but the observable outcome is the same.
        if (awtImage.getColorModel().hasAlpha())
        {
            throw new UnsupportedOperationException("alpha channel not implemented");
        }
        return pdImage;
    }

    /** Decodes a JPEG file into a BufferedImage using a raster-capable reader. */
    public static BufferedImage readJpegFile(File file) throws IOException
    {
        return readJpeg(file);
    }

    // Decodes JPEG data from either a File or an InputStream (whatever
    // ImageIO.createImageInputStream accepts), preferring a reader that can
    // read rasters.
    // NOTE(review): if readers exist but none canReadRaster, the loop leaves
    // 'reader' pointing at the last reader tried and proceeds anyway -- confirm
    // whether that fallback is intended.
    private static BufferedImage readJpeg(Object fileOrStream) throws IOException
    {
        Iterator<ImageReader> readers = ImageIO.getImageReadersByFormatName("JPEG");
        ImageReader reader = null;
        while (readers.hasNext())
        {
            reader = readers.next();
            if (reader.canReadRaster())
            {
                break;
            }
        }
        requireIOCondition(nonNull(reader), "Cannot find an ImageIO reader for JPEG image");
        try (ImageInputStream iis = ImageIO.createImageInputStream(fileOrStream))
        {
            reader.setInput(iis);
            ImageIO.setUseCache(false);
            return reader.read(0);
        }
        finally
        {
            reader.dispose();
        }
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image.
     *
     * @param image the buffered image to embed
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(BufferedImage image) throws IOException
    {
        return createFromImage(image, 0.75f);
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image and a given quality. The image will be created at 72 DPI.
     *
     * @param image the buffered image to embed
     * @param quality the desired JPEG compression quality
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(BufferedImage image, float quality)
            throws IOException
    {
        return createFromImage(image, quality, 72);
    }

    /**
     * Creates a new JPEG Image XObject from a Buffered Image, a given quality and DPI.
     *
     * @param image the buffered image to embed
     * @param quality the desired JPEG compression quality
     * @param dpi the desired DPI (resolution) of the JPEG
     * @return a new Image XObject
     * @throws IOException if the JPEG data cannot be written
     */
    public static PDImageXObject createFromImage(BufferedImage image, float quality, int dpi)
            throws IOException
    {
        return createJPEG(image, quality, dpi);
    }

    // returns the alpha channel of an image, or null when the image has no
    // usable alpha raster
    private static BufferedImage getAlphaImage(BufferedImage image)
    {
        if (!image.getColorModel().hasAlpha())
        {
            return null;
        }
        if (image.getTransparency() == Transparency.BITMASK)
        {
            throw new UnsupportedOperationException("BITMASK Transparency JPEG compression is not"
                    + " useful, use LosslessImageFactory instead");
        }
        WritableRaster alphaRaster = image.getAlphaRaster();
        if (alphaRaster == null)
        {
            // happens sometimes (PDFBOX-2654) despite colormodel claiming to have alpha
            return null;
        }
        BufferedImage alphaImage = new BufferedImage(image.getWidth(), image.getHeight(),
                BufferedImage.TYPE_BYTE_GRAY);
        alphaImage.setData(alphaRaster);
        return alphaImage;
    }

    // Creates an Image XObject from a Buffered Image using JAI Image I/O.
    // Any alpha channel is encoded separately and attached as an /SMask.
    private static PDImageXObject createJPEG(BufferedImage image, float quality, int dpi)
            throws IOException
    {
        // extract alpha channel (if any)
        BufferedImage awtColorImage = getColorImage(image);
        BufferedImage awtAlphaImage = getAlphaImage(image);
        // create XObject
        FastByteArrayOutputStream baos = new FastByteArrayOutputStream();
        encodeImageToJPEGStream(awtColorImage, quality, dpi, baos);
        ByteArrayInputStream byteStream = new ByteArrayInputStream(baos.toByteArray());
        PDImageXObject pdImage = new PDImageXObject(byteStream, COSName.DCT_DECODE,
                awtColorImage.getWidth(), awtColorImage.getHeight(), 8,
                getColorSpaceFromAWT(awtColorImage));
        // alpha -> soft mask
        if (awtAlphaImage != null)
        {
            PDImage xAlpha = JPEGFactory.createFromImage(awtAlphaImage, quality);
            pdImage.getCOSObject().setItem(COSName.SMASK, xAlpha);
        }
        return pdImage;
    }

    // Writes 'image' to 'out' as JPEG with the given quality, stamping the
    // JFIF APP0 segment with the requested DPI.
    // NOTE(review): the finally block closes the caller's OutputStream --
    // callers must not expect to keep writing to it afterwards.
    private static void encodeImageToJPEGStream(BufferedImage image, float quality, int dpi,
            OutputStream out) throws IOException
    {
        // encode to JPEG
        ImageOutputStream ios = null;
        ImageWriter imageWriter = null;
        try
        {
            // find JAI writer
            imageWriter = ImageIO.getImageWritersBySuffix("jpeg").next();
            ios = ImageIO.createImageOutputStream(out);
            imageWriter.setOutput(ios);
            // add compression
            ImageWriteParam jpegParam = imageWriter.getDefaultWriteParam();
            jpegParam.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
            jpegParam.setCompressionQuality(quality);
            // add metadata
            ImageTypeSpecifier imageTypeSpecifier = new ImageTypeSpecifier(image);
            IIOMetadata data = imageWriter.getDefaultImageMetadata(imageTypeSpecifier, jpegParam);
            Element tree = (Element) data.getAsTree("javax_imageio_jpeg_image_1.0");
            Element jfif = (Element) tree.getElementsByTagName("app0JFIF").item(0);
            jfif.setAttribute("Xdensity", Integer.toString(dpi));
            jfif.setAttribute("Ydensity", Integer.toString(dpi));
            jfif.setAttribute("resUnits", "1"); // 1 = dots/inch
            // write
            imageWriter.write(data, new IIOImage(image, null, null), jpegParam);
        }
        finally
        {
            // clean up
            IOUtils.closeQuietly(out);
            IOUtils.closeQuietly(ios);
            if (imageWriter != null)
            {
                imageWriter.dispose();
            }
        }
    }

    // returns a PDColorSpace for a given BufferedImage
    public static PDColorSpace getColorSpaceFromAWT(BufferedImage awtImage)
    {
        if (awtImage.getColorModel().getNumComponents() == 1)
        {
            // 256 color (gray) JPEG
            return PDDeviceGray.INSTANCE;
        }
        ColorSpace awtColorSpace = awtImage.getColorModel().getColorSpace();
        if (awtColorSpace instanceof ICC_ColorSpace && !awtColorSpace.isCS_sRGB())
        {
            throw new UnsupportedOperationException("ICC color spaces not implemented");
        }
        switch (awtColorSpace.getType())
        {
        case ColorSpace.TYPE_RGB:
            return PDDeviceRGB.INSTANCE;
        case ColorSpace.TYPE_GRAY:
            return PDDeviceGray.INSTANCE;
        case ColorSpace.TYPE_CMYK:
            return PDDeviceCMYK.INSTANCE;
        default:
            throw new UnsupportedOperationException(
                    "color space not implemented: " + awtColorSpace.getType());
        }
    }

    // returns the color channels of an image
    private static BufferedImage getColorImage(BufferedImage image)
    {
        if (!image.getColorModel().hasAlpha())
        {
            return image;
        }
        if (image.getColorModel().getColorSpace().getType() != ColorSpace.TYPE_RGB)
        {
            throw new UnsupportedOperationException("only RGB color spaces are implemented");
        }
        // create an RGB image without alpha
        // BEWARE: the previous solution in the history
        // g.setComposite(AlphaComposite.Src) and g.drawImage()
        // didn't work properly for TYPE_4BYTE_ABGR.
        // alpha values of 0 result in a black dest pixel!!!
        BufferedImage rgbImage = new BufferedImage(image.getWidth(), image.getHeight(),
                BufferedImage.TYPE_3BYTE_BGR);
        return new ColorConvertOp(null).filter(image, rgbImage);
    }
}
| apache-2.0 |
mark-watson/power-java | machine_learning_spark/src/test/java/com/markwatson/machine_learning/HelloTest.java | 711 | package com.markwatson.machine_learning;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import java.util.Set;
/**
 * JUnit 3 style smoke test for the Spark example: runs HelloSpark.main and
 * relies on it not throwing (the output itself is not asserted).
 */
public class HelloTest extends TestCase {
    /**
     * Create the test case
     *
     * @param testName name of the test case
     */
    public HelloTest(String testName)
    {
        super( testName );
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite()
    {
        return new TestSuite( HelloTest.class );
    }

    /**
     * Test that is just for side effect printouts:
     */
    public void test1() throws Exception {
        // Trivial assertion keeps JUnit happy; the real check is that main()
        // completes without throwing.
        assertTrue(true);
        HelloSpark.main(null);
    }
}
| apache-2.0 |
mattyb149/incubator-drill | common/src/main/java/org/apache/drill/common/config/CommonConstants.java | 1490 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.common.config;
/**
 * Well-known configuration resource names and scan-package config keys shared
 * across Drill modules.
 *
 * NOTE(review): this is a "constant interface"; a final class with a private
 * constructor is the conventional home for such constants, but converting it
 * would break existing importers/implementers, so it is documented as-is.
 */
public interface CommonConstants {
    /** Classpath resource holding Drill's built-in default configuration. */
    public static final String CONFIG_DEFAULT = "drill-default.conf";
    /** Classpath resource holding user/site configuration overrides. */
    public static final String CONFIG_OVERRIDE = "drill-override.conf";
    /** Config key listing packages scanned for logical operators. */
    public static final String LOGICAL_OPERATOR_SCAN_PACKAGES = "drill.logical.operator.packages";
    /** Config key listing packages scanned for physical operators. */
    public static final String PHYSICAL_OPERATOR_SCAN_PACKAGES = "drill.physical.operator.packages";
    /** Config key listing packages scanned for storage plugin configurations. */
    public static final String STORAGE_PLUGIN_CONFIG_SCAN_PACKAGES = "drill.logical.storage.packages";
    /** Marker file whose presence identifies a jar as a Drill module. */
    public static final String DRILL_JAR_MARKER_FILE = "drill-module.conf";
    /** Config key listing packages scanned for logical functions. */
    public static final String LOGICAL_FUNCTION_SCAN_PACKAGES = "drill.logical.function.packages";
}
| apache-2.0 |
eugenebakulin/spotify | src/main/java/com/bakulin/spotify/client/testing/HomePage.java | 3510 | package com.bakulin.spotify.client.testing;
import org.sikuli.script.App;
import org.sikuli.script.FindFailed;
import org.sikuli.script.Key;
import org.sikuli.script.Match;
import org.sikuli.script.Pattern;
import org.sikuli.script.Region;
import org.sikuli.script.Screen;
/**
* Page-Object Model representation of Home Page for Spotify desktop client.
*/
/**
 * Page-Object Model representation of the Home page of the Spotify desktop
 * client, driven through SikuliX screen-image matching.
 */
public class HomePage extends Page {

    private static final Pattern PLAYER_MENU_PATTERN = new Pattern("player_menu.png");
    private static final Pattern PLAYER_MENU_ACTIVE_PATTERN = new Pattern("player_menu_active.png");
    private static final Pattern ARTISTS_HEADER_PATTERN = new Pattern("artists_header.png");
    private static final Pattern ARTISTS_IMAGE_PATTERN = new Pattern("author_mc_hammer.png");
    private static final Pattern SEARCH_INPUT_PATTERN = new Pattern("search_input.png");
    private static final Pattern OVERVIEW_MENU_PATTERN = new Pattern("overview_menu.png");
    private static final Pattern GREEN_PLAY_BUTTON_PATTERN = new Pattern("green_play_button.png");

    /** Seconds to wait for Home page elements to appear. */
    private static final int TIMEOUT_HOME_PAGE_DISPLAY = 20;

    public HomePage(Screen s) {
        super(s);
    }

    /**
     * Waits for the player menu (the Home page anchor element); failures are
     * only logged, not rethrown.
     */
    @Override
    void waitForPageToBeDisplayed() {
        System.out.println("Checking of Home page is displayed. Looking for Player menu.");
        try {
            s.wait(PLAYER_MENU_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
            System.out.println("Player menu is displayed.");
        } catch (FindFailed e) {
            System.out.println("Player menu is not displayed.");
        }
    }

    /**
     * Checks if the Home page is displayed after login.
     *
     * @return true when the player menu is found within the timeout
     * @throws FindFailed if the player menu never appears (Sikuli's wait throws
     *         rather than returning null, so the false branch is a defensive
     *         fallback)
     */
    public Boolean isHomePageDisplayed() throws FindFailed {
        System.out.println("Checking if Home Page is displayed.");
        Match playerMenu = s.wait(PLAYER_MENU_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
        if (playerMenu != null) {
            System.out.println("Found player menu!");
            playerMenu.highlight(1);
            return true;
        }
        System.out.println("Player menu was not found!");
        return false;
    }

    /** Logs out via the OS-level key combination and returns the login page. */
    public LoginPage logOut() {
        SystemActions.logoutKeysCombination(s);
        return new LoginPage(s);
    }

    /** Clears the search box, types the request and submits it with ENTER. */
    public HomePage search(String searchRequest) throws FindFailed {
        s.wait(OVERVIEW_MENU_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
        s.click(SEARCH_INPUT_PATTERN);
        SystemActions.selectAllAndClear(s);
        s.type(searchRequest);
        s.type(SEARCH_INPUT_PATTERN, Key.ENTER);
        return this;
    }

    /**
     * Checks that the artist matching the given image pattern appears in the
     * search results.
     *
     * Bug fix: the method previously ignored its {@code pattern} parameter and
     * always searched for the hard-coded MC Hammer image; it now searches for
     * the pattern the caller supplies.
     *
     * @param pattern image of the artist expected in the results
     * @throws FindFailed if the artists header or the artist image is not found
     */
    public Boolean isArtistFound(Pattern pattern) throws FindFailed {
        /*
         * We assume Artist Pattern to be visible on first screen as we search by
         * Artist name. Search by less relevant request should be a separate case.
         */
        s.wait(ARTISTS_HEADER_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
        Region artist = s.find(pattern);
        if (artist != null) {
            artist.highlight(1);
            return true;
        }
        return false;
    }

    /** Clicks the green play button once it is visible. */
    public HomePage clickGreenPlayButton() throws FindFailed {
        s.wait(GREEN_PLAY_BUTTON_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
        s.click(GREEN_PLAY_BUTTON_PATTERN);
        return this;
    }

    /** @return true if the active-player image is visible (playback running). */
    public Boolean isPlaying() {
        try {
            Match activePlayer = s.wait(PLAYER_MENU_ACTIVE_PATTERN, TIMEOUT_HOME_PAGE_DISPLAY);
            activePlayer.highlight(1);
            return true;
        } catch (FindFailed e) {
            System.out.println("Was not able to find pattern " + PLAYER_MENU_ACTIVE_PATTERN);
            return false;
        }
    }

    // TODO: handle advertisement popup
    // TODO: try/catch where necessary
    // TODO: prontf where necessary
}
| apache-2.0 |
nifeng1989/myjdbc | jdbc/src/main/java/net/fengni/jdbc/query/SQLBuilder.java | 4115 | package net.fengni.jdbc.query;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* Created by fengni on 2015/11/3.
*/
public class SQLBuilder {

    /** Conditions combined with AND in the WHERE clause. */
    private List<Condition> conditionList = new LinkedList<Condition>();

    /** Orderings emitted, in insertion order, into the ORDER BY clause. */
    private List<Order> orderList = new LinkedList<Order>();

    /** Optional LIMIT clause; omitted from the SQL when null. */
    private Limit limit;

    /** Target table name, interpolated verbatim into the statement. */
    private String table;

    /** Lazily built parameter map keyed by condition column; see {@link #getParams()}. */
    private Map<String, Object> params;

    public SQLBuilder(String table) {
        this.table = table;
    }

    /** Adds a single ordering; a null argument is silently ignored. */
    public SQLBuilder addOrder(Order order) {
        if (order != null) {
            orderList.add(order);
        }
        return this;
    }

    /** Adds all given orderings; a null list is silently ignored. */
    public SQLBuilder addOrder(List<Order> orderList) {
        if (orderList != null) {
            this.orderList.addAll(orderList);
        }
        return this;
    }

    public SQLBuilder removeOrder(Order order) {
        orderList.remove(order);
        return this;
    }

    /** Adds a single condition; a null argument is silently ignored. */
    public SQLBuilder addCondition(Condition condition) {
        if (condition != null) {
            conditionList.add(condition);
        }
        return this;
    }

    /** Adds all given conditions; a null list is silently ignored. */
    public SQLBuilder addCondition(List<Condition> conditionList) {
        if (conditionList != null) {
            // Fix: the original appended the argument list to itself
            // (conditionList.addAll(conditionList)), leaving this builder's
            // own condition list untouched.
            this.conditionList.addAll(conditionList);
        }
        return this;
    }

    /** Adds all given conditions; a null or empty array is silently ignored. */
    public SQLBuilder addCondition(Condition[] conditions) {
        if (conditions != null && conditions.length > 0) {
            for (Condition condition : conditions) {
                conditionList.add(condition);
            }
        }
        return this;
    }

    public SQLBuilder removeCondition(Condition condition) {
        conditionList.remove(condition);
        return this;
    }

    public Limit getLimit() {
        return limit;
    }

    public SQLBuilder setLimit(Limit limit) {
        this.limit = limit;
        return this;
    }

    /**
     * Builds a SELECT statement from the configured conditions, orderings
     * and limit, e.g. {@code select * from t where id=1}.
     */
    public String select() {
        StringBuilder sb = new StringBuilder();
        sb.append("select * from ").append(table);
        if (conditionList != null && conditionList.size() > 0) {
            sb.append(" where ");
            for (int i = 0; i < conditionList.size(); ) {
                Condition condition = conditionList.get(i);
                sb.append(condition.toSql());
                i++;
                if (i < conditionList.size()) {
                    sb.append(" and ");
                }
            }
        }
        if (orderList != null && orderList.size() > 0) {
            sb.append(" order by ");
            for (int i = 0; i < orderList.size(); ) {
                Order order = orderList.get(i);
                sb.append(order.toSql());
                i++;
                // Fix: the original used i <= orderList.size(), which appended
                // a trailing comma after the last ORDER BY term, producing
                // invalid SQL.
                if (i < orderList.size()) {
                    sb.append(",");
                }
            }
        }
        if (limit != null) {
            sb.append(" limit ").append(limit.getStartIndex());
            sb.append(",").append(limit.getSize());
        }
        return sb.toString();
    }

    /** Builds the matching SELECT COUNT(*) statement (conditions only). */
    public String count() {
        StringBuilder sb = new StringBuilder();
        sb.append("select count(*) from ").append(table);
        if (conditionList != null && conditionList.size() > 0) {
            sb.append(" where ");
            for (int i = 0; i < conditionList.size(); ) {
                Condition condition = conditionList.get(i);
                sb.append(condition.toSql());
                i++;
                if (i < conditionList.size()) {
                    sb.append(" and ");
                }
            }
        }
        return sb.toString();
    }

    /**
     * Lazily builds (and then caches) the parameter map from the conditions.
     * NOTE(review): keyed by column name, so two conditions on the same column
     * overwrite each other — confirm whether duplicate columns are expected.
     */
    public Map<String, Object> getParams() {
        if (conditionList != null && conditionList.size() > 0) {
            if (params == null) {
                params = new HashMap<String,Object>();
                for (Condition condition : conditionList) {
                    params.put(condition.getColumn(), condition.getValue());
                }
            }
        }
        return params;
    }
}
| apache-2.0 |
avthart/mina-sshd | sshd-core/src/main/java/org/apache/sshd/client/scp/ScpClient.java | 4005 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.client.scp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.attribute.PosixFilePermission;
import java.util.Collection;
import java.util.concurrent.TimeUnit;
import org.apache.sshd.client.session.ClientSessionHolder;
import org.apache.sshd.common.scp.ScpTimestamp;
/**
*/
public interface ScpClient extends ClientSessionHolder {
    /** Transfer options that modify SCP copy behavior. */
    enum Option {
        /** Copy directories recursively. */
        Recursive,
        /** Preserve modification times and permission bits. */
        PreserveAttributes,
        /** Treat the remote target path as a directory. */
        TargetIsDirectory
    }
    /**
     * Configurable value of the {@link org.apache.sshd.common.FactoryManager}
     * for controlling the wait timeout for opening a channel for an SCP command
     * in milliseconds. If not specified, then {@link #DEFAULT_EXEC_CHANNEL_OPEN_TIMEOUT}
     * value is used
     */
    String SCP_EXEC_CHANNEL_OPEN_TIMEOUT = "scp-exec-channel-open-timeout";
    /** Default channel-open timeout (30 seconds) when the property above is unset. */
    long DEFAULT_EXEC_CHANNEL_OPEN_TIMEOUT = TimeUnit.SECONDS.toMillis(30L);
    // Download a single remote path to a local file-system location.
    void download(String remote, String local, Option... options) throws IOException;
    void download(String remote, String local, Collection<Option> options) throws IOException;
    void download(String remote, Path local, Option... options) throws IOException;
    void download(String remote, Path local, Collection<Option> options) throws IOException;
    // NOTE: the remote location MUST be a file or an exception is generated
    void download(String remote, OutputStream local) throws IOException;
    // Download a single remote file fully into memory.
    byte[] downloadBytes(String remote) throws IOException;
    // Download several remote paths to the same local location.
    void download(String[] remote, String local, Option... options) throws IOException;
    void download(String[] remote, String local, Collection<Option> options) throws IOException;
    void download(String[] remote, Path local, Option... options) throws IOException;
    void download(String[] remote, Path local, Collection<Option> options) throws IOException;
    // Upload a single local path to a remote location.
    void upload(String local, String remote, Option... options) throws IOException;
    void upload(String local, String remote, Collection<Option> options) throws IOException;
    void upload(Path local, String remote, Option... options) throws IOException;
    void upload(Path local, String remote, Collection<Option> options) throws IOException;
    // Upload several local paths to the same remote location.
    void upload(String[] local, String remote, Option... options) throws IOException;
    void upload(String[] local, String remote, Collection<Option> options) throws IOException;
    void upload(Path[] local, String remote, Option... options) throws IOException;
    void upload(Path[] local, String remote, Collection<Option> options) throws IOException;
    // NOTE: due to SCP command limitations, the amount of data to be uploaded must be known a-priori
    // To upload a dynamic amount of data use SFTP
    void upload(byte[] data, String remote, Collection<PosixFilePermission> perms, ScpTimestamp time) throws IOException;
    void upload(byte[] data, int offset, int len, String remote, Collection<PosixFilePermission> perms, ScpTimestamp time) throws IOException;
    void upload(InputStream local, String remote, long size, Collection<PosixFilePermission> perms, ScpTimestamp time) throws IOException;
}
| apache-2.0 |
microservices-summit-2016/resilience-demo | employer-service/src/main/java/com/capgemini/resilience/employer/rest/EmployerRestService.java | 2031 | package com.capgemini.resilience.employer.rest;
import java.util.List;
import javax.inject.Inject;
import javax.ws.rs.core.MediaType;
import com.capgemini.resilience.employer.model.Employer;
import com.capgemini.resilience.employer.service.EmployerSearchCriteria;
import com.capgemini.resilience.employer.service.EmployerService;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
/**
* Created by kso on 15.02.16.
*/
@RestController
public class EmployerRestService {
    // Business service that this REST facade delegates to.
    @Inject
    private EmployerService service;
    /**
     * Reads a single employer by id.
     *
     * @param id the employer id taken from the URL path
     * @return the employer entity (serialized as JSON)
     */
    @RequestMapping(value = "/employer/{id}", method = RequestMethod.GET, produces = {MediaType.APPLICATION_JSON})
    @ResponseBody
    public Employer get(@PathVariable("id") Long id) {
        return service.read(id);
    }
    /**
     * Deletes the employer with the given id.
     *
     * @param id the employer id taken from the URL path
     * @return HTTP 200 on success
     */
    @RequestMapping(value = "/employer/{id}", method = RequestMethod.DELETE, produces = {MediaType.APPLICATION_JSON})
    @ResponseBody
    public ResponseEntity<String> delete(@PathVariable("id") Long id) {
        this.service.delete(id);
        return new ResponseEntity<>(HttpStatus.OK);
    }
    /**
     * Creates or updates the employer given in the request body.
     *
     * @param employer the employer payload (JSON)
     * @return HTTP 200 on success
     */
    @RequestMapping(value = "/employer", method = RequestMethod.POST, produces = {MediaType.APPLICATION_JSON})
    @ResponseBody
    public ResponseEntity<String> saveOrUpdate(@RequestBody Employer employer) {
        this.service.saveOrUpdate(employer);
        return new ResponseEntity<>(HttpStatus.OK);
    }
    /**
     * Searches employers matching the criteria bound from request parameters.
     *
     * @param searchCriteria criteria populated from query parameters
     * @return the matching employers (serialized as JSON)
     */
    @RequestMapping(value = "/employer", method = RequestMethod.GET, produces = {MediaType.APPLICATION_JSON})
    @ResponseBody
    List<Employer> search(EmployerSearchCriteria searchCriteria) {
        return service.search(searchCriteria);
    }
}
| apache-2.0 |
leapframework/framework | base/core/src/test/java/tested/base/instrument/InstrumentParent.java | 852 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tested.base.instrument;
import leap.core.annotation.Inject;
import leap.core.annotation.Monitored;
@Monitored
public class InstrumentParent {
    // Injected collaborator; presumably exercised by the framework's
    // instrumentation tests (this class lives under test sources) — confirm.
    private @Inject InstrumentNested nested;
    // Intentionally empty: exists only as an instrumentation target.
    public void test() {
    }
}
| apache-2.0 |
hgschmie/apache-maven-plugins | maven-dependency-plugin/src/main/java/org/apache/maven/plugins/dependency/analyze/AbstractAnalyzeMojo.java | 16864 | package org.apache.maven.plugins.dependency.analyze;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.File;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.artifact.filter.StrictPatternExcludesArtifactFilter;
import org.apache.maven.shared.dependency.analyzer.ProjectDependencyAnalysis;
import org.apache.maven.shared.dependency.analyzer.ProjectDependencyAnalyzer;
import org.apache.maven.shared.dependency.analyzer.ProjectDependencyAnalyzerException;
import org.codehaus.plexus.PlexusConstants;
import org.codehaus.plexus.PlexusContainer;
import org.codehaus.plexus.context.Context;
import org.codehaus.plexus.context.ContextException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Contextualizable;
import org.codehaus.plexus.util.xml.PrettyPrintXMLWriter;
/**
* Analyzes the dependencies of this project and determines which are: used and declared; used and undeclared; unused
* and declared.
*
* @author <a href="mailto:markhobson@gmail.com">Mark Hobson</a>
* @version $Id$
* @since 2.0-alpha-5
*/
public abstract class AbstractAnalyzeMojo
    extends AbstractMojo
    implements Contextualizable
{
    // fields -----------------------------------------------------------------
    /**
     * The plexus context to look-up the right {@link ProjectDependencyAnalyzer} implementation depending on the mojo
     * configuration.
     */
    private Context context;
    /**
     * The Maven project to analyze.
     */
    @Parameter( defaultValue = "${project}", readonly = true, required = true )
    private MavenProject project;
    /**
     * Specify the project dependency analyzer to use (plexus component role-hint).
     * By default, <a href="/shared/maven-dependency-analyzer/">maven-dependency-analyzer</a> is used.
     *
     * To use this, you must declare a dependency for this plugin that contains the code for the
     * analyzer. The analyzer must have a declared Plexus role name, and you specify the role name
     * here.
     *
     * @since 2.2
     */
    @Parameter( property = "analyzer", defaultValue = "default" )
    private String analyzer;
    /**
     * Whether to fail the build if a dependency warning is found.
     */
    @Parameter( property = "failOnWarning", defaultValue = "false" )
    private boolean failOnWarning;
    /**
     * Output used dependencies.
     */
    @Parameter( property = "verbose", defaultValue = "false" )
    private boolean verbose;
    /**
     * Ignore Runtime/Provided/Test/System scopes for unused dependency analysis.
     */
    @Parameter( property = "ignoreNonCompile", defaultValue = "false" )
    private boolean ignoreNonCompile;
    /**
     * Output the xml for the missing dependencies (used but not declared).
     *
     * @since 2.0-alpha-5
     */
    @Parameter( property = "outputXML", defaultValue = "false" )
    private boolean outputXML;
    /**
     * Output scriptable values for the missing dependencies (used but not declared).
     *
     * @since 2.0-alpha-5
     */
    @Parameter( property = "scriptableOutput", defaultValue = "false" )
    private boolean scriptableOutput;
    /**
     * Flag to use for scriptable output.
     *
     * @since 2.0-alpha-5
     */
    @Parameter( property = "scriptableFlag", defaultValue = "$$$%%%" )
    private String scriptableFlag;
    /**
     * Base directory of the project; used to build the pom.xml path emitted in
     * scriptable output.
     *
     * @since 2.0-alpha-5
     */
    @Parameter( defaultValue = "${basedir}", readonly = true )
    private File baseDir;
    /**
     * Target folder
     *
     * @since 2.0-alpha-5
     */
    @Parameter( defaultValue = "${project.build.directory}", readonly = true )
    private File outputDirectory;
    /**
     * Force dependencies as used, to override incomplete result caused by bytecode-level analysis.
     * Dependency format is <code>groupId:artifactId</code>.
     *
     * @since 2.6
     */
    @Parameter
    private String[] usedDependencies;
    /**
     * Skip plugin execution completely.
     *
     * @since 2.7
     */
    @Parameter( property = "mdep.analyze.skip", defaultValue = "false" )
    private boolean skip;
    /**
     * List of dependencies that will be ignored.
     *
     * Any dependency on this list will be excluded from the "declared but unused" and the "used but undeclared" list.
     *
     * The filter syntax is:
     *
     * <pre>
     * [groupId]:[artifactId]:[type]:[version]
     * </pre>
     *
     * where each pattern segment is optional and supports full and partial <code>*</code> wildcards. An empty pattern
     * segment is treated as an implicit wildcard.
     * *
     * <p>For example, <code>org.apache.*</code> will match all artifacts whose group id starts with
     * <code>org.apache.</code>, and <code>:::*-SNAPSHOT</code> will match all snapshot artifacts.</p>
     *
     * @since 2.10
     * @see StrictPatternExcludesArtifactFilter
     */
    @Parameter
    private String [] ignoredDependencies = new String[0];
    /**
     * List of dependencies that will be ignored if they are used but undeclared.
     *
     * The filter syntax is:
     *
     * <pre>
     * [groupId]:[artifactId]:[type]:[version]
     * </pre>
     *
     * where each pattern segment is optional and supports full and partial <code>*</code> wildcards. An empty pattern
     * segment is treated as an implicit wildcard.
     * *
     * <p>For example, <code>org.apache.*</code> will match all artifacts whose group id starts with
     * <code>org.apache.</code>, and <code>:::*-SNAPSHOT</code> will match all snapshot artifacts.</p>
     *
     * @since 2.10
     * @see StrictPatternExcludesArtifactFilter
     */
    @Parameter
    private String [] ignoredUsedUndeclaredDependencies = new String[0];
    /**
     * List of dependencies that will be ignored if they are declared but unused.
     *
     * The filter syntax is:
     *
     * <pre>
     * [groupId]:[artifactId]:[type]:[version]
     * </pre>
     *
     * where each pattern segment is optional and supports full and partial <code>*</code> wildcards. An empty pattern
     * segment is treated as an implicit wildcard.
     * *
     * <p>For example, <code>org.apache.*</code> will match all artifacts whose group id starts with
     * <code>org.apache.</code>, and <code>:::*-SNAPSHOT</code> will match all snapshot artifacts.</p>
     *
     * @since 2.10
     * @see StrictPatternExcludesArtifactFilter
     */
    @Parameter
    private String [] ignoredUnusedDeclaredDependencies = new String[0];
    // Mojo methods -----------------------------------------------------------
    /*
     * @see org.apache.maven.plugin.Mojo#execute()
     */
    @Override
    public void execute()
        throws MojoExecutionException, MojoFailureException
    {
        if ( isSkip() )
        {
            getLog().info( "Skipping plugin execution" );
            return;
        }
        // Packaging "pom" projects have no classes to analyze.
        if ( "pom".equals( project.getPackaging() ) )
        {
            getLog().info( "Skipping pom project" );
            return;
        }
        // Nothing compiled yet: no bytecode means no analysis is possible.
        if ( outputDirectory == null || !outputDirectory.exists() )
        {
            getLog().info( "Skipping project with no build directory" );
            return;
        }
        boolean warning = checkDependencies();
        if ( warning && failOnWarning )
        {
            throw new MojoExecutionException( "Dependency problems found" );
        }
    }
    /**
     * Looks up the {@link ProjectDependencyAnalyzer} Plexus component selected by the
     * {@code analyzer} role-hint.
     *
     * @return the analyzer component
     * @throws MojoExecutionException if the component cannot be instantiated
     */
    protected ProjectDependencyAnalyzer createProjectDependencyAnalyzer()
        throws MojoExecutionException
    {
        final String role = ProjectDependencyAnalyzer.ROLE;
        final String roleHint = analyzer;
        try
        {
            final PlexusContainer container = (PlexusContainer) context.get( PlexusConstants.PLEXUS_KEY );
            return (ProjectDependencyAnalyzer) container.lookup( role, roleHint );
        }
        catch ( Exception exception )
        {
            throw new MojoExecutionException(
                "Failed to instantiate ProjectDependencyAnalyser with role " + role + " / role-hint " + roleHint,
                exception );
        }
    }
    /**
     * Plexus lifecycle callback: stores the container context for the component
     * lookup performed in {@link #createProjectDependencyAnalyzer()}.
     */
    @Override
    public void contextualize( Context context )
        throws ContextException
    {
        this.context = context;
    }
    /**
     * @return whether plugin execution should be skipped entirely
     */
    protected final boolean isSkip()
    {
        return skip;
    }
    // private methods --------------------------------------------------------
    /**
     * Runs the dependency analysis, applies the configured ignore filters, and
     * logs/writes all requested reports.
     *
     * @return true if any (non-ignored) "used undeclared" or "unused declared" dependency was found
     * @throws MojoExecutionException if the analysis itself fails
     */
    private boolean checkDependencies()
        throws MojoExecutionException
    {
        ProjectDependencyAnalysis analysis;
        try
        {
            analysis = createProjectDependencyAnalyzer().analyze( project );
            if ( usedDependencies != null )
            {
                analysis = analysis.forceDeclaredDependenciesUsage( usedDependencies );
            }
        }
        catch ( ProjectDependencyAnalyzerException exception )
        {
            throw new MojoExecutionException( "Cannot analyze dependencies", exception );
        }
        if ( ignoreNonCompile )
        {
            analysis = analysis.ignoreNonCompile();
        }
        Set<Artifact> usedDeclared = new LinkedHashSet<Artifact>( analysis.getUsedDeclaredArtifacts() );
        Set<Artifact> usedUndeclared = new LinkedHashSet<Artifact>( analysis.getUsedUndeclaredArtifacts() );
        Set<Artifact> unusedDeclared = new LinkedHashSet<Artifact>( analysis.getUnusedDeclaredArtifacts() );
        Set<Artifact> ignoredUsedUndeclared = new LinkedHashSet<Artifact>();
        Set<Artifact> ignoredUnusedDeclared = new LinkedHashSet<Artifact>();
        // filterDependencies removes matches from the given set and returns them,
        // so the "usedUndeclared"/"unusedDeclared" sets only keep real problems.
        ignoredUsedUndeclared.addAll( filterDependencies( usedUndeclared, ignoredDependencies ) );
        ignoredUsedUndeclared.addAll( filterDependencies( usedUndeclared, ignoredUsedUndeclaredDependencies ) );
        ignoredUnusedDeclared.addAll( filterDependencies( unusedDeclared, ignoredDependencies ) );
        ignoredUnusedDeclared.addAll( filterDependencies( unusedDeclared, ignoredUnusedDeclaredDependencies ) );
        boolean reported = false;
        boolean warning = false;
        if ( verbose && !usedDeclared.isEmpty() )
        {
            getLog().info( "Used declared dependencies found:" );
            logArtifacts( analysis.getUsedDeclaredArtifacts(), false );
            reported = true;
        }
        if ( !usedUndeclared.isEmpty() )
        {
            getLog().warn( "Used undeclared dependencies found:" );
            logArtifacts( usedUndeclared, true );
            reported = true;
            warning = true;
        }
        if ( !unusedDeclared.isEmpty() )
        {
            getLog().warn( "Unused declared dependencies found:" );
            logArtifacts( unusedDeclared, true );
            reported = true;
            warning = true;
        }
        if ( verbose && !ignoredUsedUndeclared.isEmpty() )
        {
            getLog().info( "Ignored used undeclared dependencies:" );
            logArtifacts( ignoredUsedUndeclared, false );
            reported = true;
        }
        if ( verbose && !ignoredUnusedDeclared.isEmpty() )
        {
            getLog().info( "Ignored unused declared dependencies:" );
            logArtifacts( ignoredUnusedDeclared, false );
            reported = true;
        }
        if ( outputXML )
        {
            writeDependencyXML( usedUndeclared );
        }
        if ( scriptableOutput )
        {
            writeScriptableOutput( usedUndeclared );
        }
        if ( !reported )
        {
            getLog().info( "No dependency problems found" );
        }
        return warning;
    }
    /**
     * Logs each artifact, one per line, at WARN or INFO level.
     */
    private void logArtifacts( Set<Artifact> artifacts, boolean warn )
    {
        if ( artifacts.isEmpty() )
        {
            getLog().info( "   None" );
        }
        else
        {
            for ( Artifact artifact : artifacts )
            {
                // called because artifact will set the version to -SNAPSHOT only if I do this. MNG-2961
                artifact.isSnapshot();
                if ( warn )
                {
                    getLog().warn( "   " + artifact );
                }
                else
                {
                    getLog().info( "   " + artifact );
                }
            }
        }
    }
    /**
     * Logs a ready-to-paste XML snippet declaring the given (used but undeclared)
     * dependencies so the user can copy it into the pom.
     */
    private void writeDependencyXML( Set<Artifact> artifacts )
    {
        if ( !artifacts.isEmpty() )
        {
            getLog().info( "Add the following to your pom to correct the missing dependencies: " );
            StringWriter out = new StringWriter();
            PrettyPrintXMLWriter writer = new PrettyPrintXMLWriter( out );
            for ( Artifact artifact : artifacts )
            {
                // called because artifact will set the version to -SNAPSHOT only if I do this. MNG-2961
                artifact.isSnapshot();
                writer.startElement( "dependency" );
                writer.startElement( "groupId" );
                writer.writeText( artifact.getGroupId() );
                writer.endElement();
                writer.startElement( "artifactId" );
                writer.writeText( artifact.getArtifactId() );
                writer.endElement();
                writer.startElement( "version" );
                writer.writeText( artifact.getBaseVersion() );
                if ( !StringUtils.isBlank( artifact.getClassifier() ) )
                {
                    writer.startElement( "classifier" );
                    writer.writeText( artifact.getClassifier() );
                    writer.endElement();
                }
                writer.endElement();
                // Scope element is only emitted for non-compile scopes (compile is the default).
                if ( !Artifact.SCOPE_COMPILE.equals( artifact.getScope() ) )
                {
                    writer.startElement( "scope" );
                    writer.writeText( artifact.getScope() );
                    writer.endElement();
                }
                writer.endElement();
            }
            getLog().info( "\n" + out.getBuffer() );
        }
    }
    /**
     * Logs one machine-parseable line per missing dependency, each prefixed with
     * the configured {@code scriptableFlag} and the absolute pom.xml path.
     */
    private void writeScriptableOutput( Set<Artifact> artifacts )
    {
        if ( !artifacts.isEmpty() )
        {
            getLog().info( "Missing dependencies: " );
            String pomFile = baseDir.getAbsolutePath() + File.separatorChar + "pom.xml";
            StringBuilder buf = new StringBuilder();
            for ( Artifact artifact : artifacts )
            {
                // called because artifact will set the version to -SNAPSHOT only if I do this. MNG-2961
                artifact.isSnapshot();
                buf.append( scriptableFlag ).append( ":" ).append( pomFile ).append( ":" )
                    .append( artifact.getDependencyConflictId() ).append( ":" ).append( artifact.getClassifier() )
                    .append( ":" ).append( artifact.getBaseVersion() ).append( ":" ).append( artifact.getScope() )
                    .append( "\n" );
            }
            getLog().info( "\n" + buf );
        }
    }
    /**
     * Removes from {@code artifacts} every artifact matched by one of the
     * {@code excludes} patterns, and returns the removed (ignored) artifacts.
     */
    private List<Artifact> filterDependencies( Set<Artifact> artifacts, String[] excludes )
        throws MojoExecutionException
    {
        ArtifactFilter filter = new StrictPatternExcludesArtifactFilter( Arrays.asList( excludes ) );
        List<Artifact> result = new ArrayList<Artifact>();
        for ( Iterator<Artifact> it = artifacts.iterator(); it.hasNext(); )
        {
            Artifact artifact = it.next();
            if ( !filter.include( artifact ) )
            {
                it.remove();
                result.add( artifact );
            }
        }
        return result;
    }
}
| apache-2.0 |
epam-debrecen-rft-2015/atsy | web/src/main/java/com/epam/rft/atsy/web/controllers/ManagePositionsController.java | 717 | package com.epam.rft.atsy.web.controllers;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
/**
* Controller for the position managing page.
*/
@Controller
@RequestMapping(path = "/secure/positions/manage")
public class ManagePositionsController {
  // Logical name of the view rendered by this controller.
  private static final String VIEW_NAME = "manage_positions";
  /**
   * Loads the page.
   * @return the ModelAndView object which describes the page
   */
  @RequestMapping(method = RequestMethod.GET)
  public ModelAndView loadPage() {
    return new ModelAndView(VIEW_NAME);
  }
}
| apache-2.0 |
rafalkrupinski/micro | micro-random/src/main/java/com/hashnot/u/random/WeightedRandom.java | 2505 | package com.hashnot.u.random;
import com.hashnot.u.range.OverlappingRangeComparator;
import com.hashnot.u.range.Range;
import com.hashnot.u.range.RangeOfComparables;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.TreeMap;
import java.util.function.Function;
public class WeightedRandom<T> {
private final Random rnd;
private final TreeMap<Range<Double>, T> ranges = new TreeMap<>();
public WeightedRandom(Map<T, Double> weightedItems) {
this(weightedItems, new Random());
}
public WeightedRandom(Map<T, Double> weightedItems, Random random) {
this(weightedItems.entrySet(), Map.Entry::getKey, Map.Entry::getValue, random);
}
public <E> WeightedRandom(Iterable<E> weightedItems, Function<E, T> itemFunc, Function<E, Double> weightFunc) {
this(weightedItems, itemFunc, weightFunc, new Random());
}
public <E> WeightedRandom(Iterable<E> weightedItems, Function<E, T> itemFunc, Function<E, Double> weightFunc, Random random) {
Double bottom = 0d;
Map<T, Double> normalized = normalize(weightedItems, weightFunc, itemFunc);
for (Map.Entry<T, Double> wi : normalized.entrySet()) {
double weight = wi.getValue();
if (weight > 0) {
Double top = bottom + weight;
Range<Double> r = new RangeOfComparables<>(bottom, top, new OverlappingRangeComparator<>());
if (ranges.containsKey(r)) {
T other = ranges.get(r);
throw new RuntimeException(String.format("Range %s conflicts with range %s", r, other));
}
ranges.put(r, wi.getKey());
bottom = top;
}
}
rnd = random;
}
protected <E> Map<T, Double> normalize(Iterable<E> weightedItems, Function<E, Double> weightFunc, Function<E, T> elemFunc) {
Map<T, Double> normalized = new HashMap<>();
double sum = 0d;
for (E e : weightedItems) {
Double weight = weightFunc.apply(e);
assert weight != null;
sum += weight;
}
for (E e : weightedItems) {
normalized.put(elemFunc.apply(e), weightFunc.apply(e) / sum);
}
return normalized;
}
public T next() {
double key = rnd.nextDouble();
Range<Double> range = RangeOfComparables.point(key, new OverlappingRangeComparator<>());
return ranges.get(range);
}
}
| apache-2.0 |
sasrin/Hystrix | hystrix-core/src/test/java/com/netflix/hystrix/HystrixCommandTest.java | 181225 | /**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.hystrix;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import com.netflix.hystrix.strategy.executionhook.HystrixCommandExecutionHook;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import rx.Observable;
import rx.Observer;
import rx.Scheduler;
import rx.functions.Action1;
import rx.functions.Func0;
import rx.observers.TestSubscriber;
import com.netflix.config.ConfigurationManager;
import com.netflix.hystrix.AbstractCommand.TryableSemaphore;
import com.netflix.hystrix.AbstractCommand.TryableSemaphoreActual;
import com.netflix.hystrix.HystrixCircuitBreakerTest.TestCircuitBreaker;
import com.netflix.hystrix.HystrixCommandProperties.ExecutionIsolationStrategy;
import com.netflix.hystrix.exception.HystrixBadRequestException;
import com.netflix.hystrix.exception.HystrixRuntimeException;
import com.netflix.hystrix.strategy.HystrixPlugins;
import com.netflix.hystrix.strategy.concurrency.HystrixContextRunnable;
import com.netflix.hystrix.strategy.concurrency.HystrixContextScheduler;
import com.netflix.hystrix.strategy.concurrency.HystrixRequestContext;
import com.netflix.hystrix.strategy.properties.HystrixProperty;
import static org.junit.Assert.*;
public class HystrixCommandTest extends CommonHystrixCommandTests<TestHystrixCommand<?>> {
    /**
     * Runs before each test: starts a fresh Hystrix request context so
     * request-scoped caches and logs start empty.
     */
    @Before
    public void prepareForTest() {
        /* we must call this to simulate a new request lifecycle running and clearing caches */
        HystrixRequestContext.initializeContext();
    }
    /**
     * Runs after each test: shuts down the current request context (if the
     * test didn't already), clears dynamic configuration, and warns if a
     * command is still marked as executing on this thread.
     */
    @After
    public void cleanup() {
        // instead of storing the reference from initialize we'll just get the current state and shutdown
        if (HystrixRequestContext.getContextForCurrentThread() != null) {
            // it could have been set NULL by the test
            HystrixRequestContext.getContextForCurrentThread().shutdown();
        }
        // force properties to be clean as well
        ConfigurationManager.getConfigInstance().clear();
        HystrixCommandKey key = Hystrix.getCurrentThreadExecutingCommand();
        if (key != null) {
            System.out.println("WARNING: Hystrix.getCurrentThreadExecutingCommand() should be null but got: " + key + ". Can occur when calling queue() and never retrieving.");
        }
    }
    /**
     * Test a successful command execution.
     */
    @Test
    public void testExecutionSuccess() {
        try {
            TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS);
            assertEquals(FlexibleTestHystrixCommand.EXECUTE_VALUE, command.execute());
            // A successful run must leave no failure/execution exception behind.
            assertEquals(null, command.getFailedExecutionException());
            assertNull(command.getExecutionException());
            assertTrue(command.getExecutionTimeInMilliseconds() > -1);
            assertTrue(command.isSuccessfulExecution());
            assertCommandExecutionEvents(command, HystrixEventType.SUCCESS);
            assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
            assertSaneHystrixRequestLog(1);
        } catch (Exception e) {
            e.printStackTrace();
            fail("We received an exception.");
        }
    }
    /**
     * Test that a command can not be executed multiple times.
     */
    @Test
    public void testExecutionMultipleTimes() {
        TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS);
        assertFalse(command.isExecutionComplete());
        // first should succeed
        assertEquals(FlexibleTestHystrixCommand.EXECUTE_VALUE, command.execute());
        assertTrue(command.isExecutionComplete());
        assertTrue(command.isExecutedInThread());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isSuccessfulExecution());
        assertNull(command.getExecutionException());
        // Re-running the same command instance must be rejected with an
        // IllegalStateException, for both execute() and queue().
        try {
            // second should fail
            command.execute();
            fail("we should not allow this ... it breaks the state of request logs");
        } catch (IllegalStateException e) {
            e.printStackTrace();
            // we want to get here
        }
        try {
            // queue should also fail
            command.queue();
            fail("we should not allow this ... it breaks the state of request logs");
        } catch (IllegalStateException e) {
            e.printStackTrace();
            // we want to get here
        }
        assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
        // Only the first (successful) execution should be recorded.
        assertCommandExecutionEvents(command, HystrixEventType.SUCCESS);
    }
    /**
     * Test a command execution that throws an HystrixException and didn't implement getFallback.
     */
    @Test
    public void testExecutionHystrixFailureWithNoFallback() {
        TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.HYSTRIX_FAILURE, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
        try {
            command.execute();
            fail("we shouldn't get here");
        } catch (HystrixRuntimeException e) {
            e.printStackTrace();
            // With no fallback implemented, the wrapper must carry both the
            // fallback exception and the implementing command class.
            assertNotNull(e.getFallbackException());
            assertNotNull(e.getImplementingClass());
        } catch (Exception e) {
            e.printStackTrace();
            fail("We should always get an HystrixRuntimeException when an error occurs.");
        }
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isFailedExecution());
        assertCommandExecutionEvents(command, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
        assertNotNull(command.getExecutionException());
        assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
/**
 * An execution that throws an arbitrary (non-Hystrix) exception with no fallback
 * implemented must still be wrapped in a HystrixRuntimeException and record
 * FAILURE + FALLBACK_MISSING.
 */
@Test
public void testExecutionFailureWithNoFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
    try {
        cmd.execute();
        fail("we shouldn't get here");
    } catch (HystrixRuntimeException e) {
        e.printStackTrace();
        assertNotNull(e.getFallbackException());
        assertNotNull(e.getImplementingClass());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should always get an HystrixRuntimeException when an error occurs.");
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A failing execution with a working fallback must return the fallback value to
 * the caller while still reporting the run() failure via getFailedExecutionException.
 */
@Test
public void testExecutionFailureWithFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.SUCCESS);
    try {
        assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, cmd.execute());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    // the underlying run() failure remains observable even though the fallback succeeded
    assertEquals("Execution Failure for TestHystrixCommand", cmd.getFailedExecutionException().getMessage());
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * When both run() and getFallback() fail, the caller receives a
 * HystrixRuntimeException whose fallback exception is populated, and the
 * events recorded are FAILURE + FALLBACK_FAILURE.
 */
@Test
public void testExecutionFailureWithFallbackFailure() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.FAILURE);
    try {
        cmd.execute();
        fail("we shouldn't get here");
    } catch (HystrixRuntimeException e) {
        System.out.println("------------------------------------------------");
        e.printStackTrace();
        System.out.println("------------------------------------------------");
        assertNotNull(e.getFallbackException());
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_FAILURE);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Asynchronous happy path: queue() returns a Future that yields the execute
 * value and the command reports SUCCESS with no exceptions.
 */
@Test
public void testQueueSuccess() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS);
    try {
        Future<?> result = cmd.queue();
        assertEquals(FlexibleTestHystrixCommand.EXECUTE_VALUE, result.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We received an exception.");
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isSuccessfulExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.SUCCESS);
    assertNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Asynchronous Hystrix failure with no fallback: Future.get() wraps the
 * HystrixRuntimeException as the cause of the thrown exception.
 */
@Test
public void testQueueKnownFailureWithNoFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.HYSTRIX_FAILURE, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
    try {
        cmd.queue().get();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        // unlike execute(), the HystrixRuntimeException arrives as the cause
        if (e.getCause() instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e.getCause();
            assertNotNull(hre.getFallbackException());
            assertNotNull(hre.getImplementingClass());
        } else {
            fail("the cause should be HystrixRuntimeException");
        }
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Asynchronous non-Hystrix failure with no fallback: the thrown exception's
 * cause must still be a HystrixRuntimeException carrying the fallback exception.
 */
@Test
public void testQueueUnknownFailureWithNoFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
    try {
        cmd.queue().get();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        if (e.getCause() instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e.getCause();
            assertNotNull(hre.getFallbackException());
            assertNotNull(hre.getImplementingClass());
        } else {
            fail("the cause should be HystrixRuntimeException");
        }
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Asynchronous failing execution with a working fallback: Future.get()
 * delivers the fallback value and the events are FAILURE + FALLBACK_SUCCESS.
 */
@Test
public void testQueueFailureWithFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.SUCCESS);
    try {
        Future<?> result = cmd.queue();
        assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, result.get());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Asynchronous execution where both run() and getFallback() fail: the thrown
 * exception's cause is a HystrixRuntimeException with the fallback exception set.
 */
@Test
public void testQueueFailureWithFallbackFailure() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.FAILURE, AbstractTestHystrixCommand.FallbackResult.FAILURE);
    try {
        cmd.queue().get();
        fail("we shouldn't get here");
    } catch (Exception e) {
        if (e.getCause() instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e.getCause();
            e.printStackTrace();
            assertNotNull(hre.getFallbackException());
        } else {
            fail("the cause should be HystrixRuntimeException");
        }
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isFailedExecution());
    assertCommandExecutionEvents(cmd, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_FAILURE);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Reactive happy path: observe().toBlocking().single() yields the execute
 * value and the command reports SUCCESS with no exceptions.
 */
@Test
public void testObserveSuccess() {
    try {
        TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS);
        assertEquals(FlexibleTestHystrixCommand.EXECUTE_VALUE, cmd.observe().toBlocking().single());
        assertNull(cmd.getFailedExecutionException());
        assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
        assertTrue(cmd.isSuccessfulExecution());
        assertCommandExecutionEvents(cmd, HystrixEventType.SUCCESS);
        assertNull(cmd.getExecutionException());
        assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    } catch (Exception e) {
        e.printStackTrace();
        fail("We received an exception.");
    }
}
/**
 * With THREAD isolation both the command's run() and the subscriber callbacks
 * should execute on a Hystrix worker thread (name prefixed with "hystrix-"),
 * not on the calling thread.
 */
@Test
public void testCallbackThreadForThreadIsolation() throws Exception {
    // records which thread run() executed on
    final AtomicReference<Thread> commandThread = new AtomicReference<Thread>();
    // records which thread onNext() was delivered on
    final AtomicReference<Thread> subscribeThread = new AtomicReference<Thread>();
    TestHystrixCommand<Boolean> command = new TestHystrixCommand<Boolean>(TestHystrixCommand.testPropsBuilder()) {
        @Override
        protected Boolean run() {
            commandThread.set(Thread.currentThread());
            return true;
        }
    };
    // latch released on terminal event (completion or error) so the asserts
    // below only run once both thread references have been captured
    final CountDownLatch latch = new CountDownLatch(1);
    command.toObservable().subscribe(new Observer<Boolean>() {
        @Override
        public void onCompleted() {
            latch.countDown();
        }
        @Override
        public void onError(Throwable e) {
            latch.countDown();
            e.printStackTrace();
        }
        @Override
        public void onNext(Boolean args) {
            subscribeThread.set(Thread.currentThread());
        }
    });
    if (!latch.await(2000, TimeUnit.MILLISECONDS)) {
        fail("timed out");
    }
    assertNotNull(commandThread.get());
    assertNotNull(subscribeThread.get());
    System.out.println("Command Thread: " + commandThread.get());
    System.out.println("Subscribe Thread: " + subscribeThread.get());
    // both threads must come from a Hystrix-managed pool
    assertTrue(commandThread.get().getName().startsWith("hystrix-"));
    assertTrue(subscribeThread.get().getName().startsWith("hystrix-"));
}
/**
 * With SEMAPHORE isolation there is no thread hand-off: both run() and the
 * subscriber callbacks should execute on the calling (test) thread.
 */
@Test
public void testCallbackThreadForSemaphoreIsolation() throws Exception {
    // records which thread run() executed on
    final AtomicReference<Thread> commandThread = new AtomicReference<Thread>();
    // records which thread onNext() was delivered on
    final AtomicReference<Thread> subscribeThread = new AtomicReference<Thread>();
    TestHystrixCommand<Boolean> command = new TestHystrixCommand<Boolean>(TestHystrixCommand.testPropsBuilder()
            .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE))) {
        @Override
        protected Boolean run() {
            commandThread.set(Thread.currentThread());
            return true;
        }
    };
    // latch released on terminal event so the asserts below see captured threads
    final CountDownLatch latch = new CountDownLatch(1);
    command.toObservable().subscribe(new Observer<Boolean>() {
        @Override
        public void onCompleted() {
            latch.countDown();
        }
        @Override
        public void onError(Throwable e) {
            latch.countDown();
            e.printStackTrace();
        }
        @Override
        public void onNext(Boolean args) {
            subscribeThread.set(Thread.currentThread());
        }
    });
    if (!latch.await(2000, TimeUnit.MILLISECONDS)) {
        fail("timed out");
    }
    assertNotNull(commandThread.get());
    assertNotNull(subscribeThread.get());
    System.out.println("Command Thread: " + commandThread.get());
    System.out.println("Subscribe Thread: " + subscribeThread.get());
    String mainThreadName = Thread.currentThread().getName();
    // semaphore should be on the calling thread
    assertTrue(commandThread.get().getName().equals(mainThreadName));
    assertTrue(subscribeThread.get().getName().equals(mainThreadName));
}
/**
 * A circuit-breaker configured with force-open must report itself as OPEN
 * and route every execution straight to the fallback.
 */
@Test
public void testCircuitBreakerReportsOpenIfForcedOpen() {
    HystrixCommand<Boolean> command = new HystrixCommand<Boolean>(HystrixCommand.Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("GROUP")).andCommandPropertiesDefaults(new HystrixCommandProperties.Setter().withCircuitBreakerForceOpen(true))) {
        @Override
        protected Boolean run() throws Exception {
            return true;
        }

        @Override
        protected Boolean getFallback() {
            return false;
        }
    };
    assertFalse(command.execute()); //fallback should fire
    System.out.println("RESULT : " + command.getExecutionEvents());
    assertTrue(command.isCircuitBreakerOpen());
}
/**
 * A circuit-breaker configured with force-closed must report itself as CLOSED
 * and allow normal execution (force-closed wins over force-open=false here).
 */
@Test
public void testCircuitBreakerReportsClosedIfForcedClosed() {
    HystrixCommand<Boolean> command = new HystrixCommand<Boolean>(HystrixCommand.Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("GROUP")).andCommandPropertiesDefaults(
            new HystrixCommandProperties.Setter().withCircuitBreakerForceOpen(false).withCircuitBreakerForceClosed(true))) {
        @Override
        protected Boolean run() throws Exception {
            return true;
        }

        @Override
        protected Boolean getFallback() {
            return false;
        }
    };
    assertTrue(command.execute());
    System.out.println("RESULT : " + command.getExecutionEvents());
    assertFalse(command.isCircuitBreakerOpen());
}
/**
 * Test that the circuit-breaker is shared across HystrixCommand objects with the same CommandKey.
 * <p>
 * This will test HystrixCommand objects with a single circuit-breaker (as if each injected with same CommandKey)
 * <p>
 * Multiple HystrixCommand objects with the same dependency use the same circuit-breaker.
 */
@Test
public void testCircuitBreakerAcrossMultipleCommandsButSameCircuitBreaker() throws InterruptedException{
    HystrixCommandKey key = HystrixCommandKey.Factory.asKey("FlexibleTestHystrixCommandWithFallback");
    // one breaker instance shared by every command below
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker(key);
    /* fail 3 times and then it should trip the circuit and stop executing */
    // failure 1
    TestHystrixCommand<?> attempt1 = getSharedCircuitBreakerCommand(ExecutionIsolationStrategy.THREAD, circuitBreaker);
    attempt1.execute();
    // NOTE(review): the sleeps give the health metrics time to register each failure
    // before the next attempt reads the breaker state — presumably sized to the
    // metrics window; confirm if this test turns flaky.
    Thread.sleep(1000);
    assertTrue(attempt1.isResponseFromFallback());
    assertFalse(attempt1.isCircuitBreakerOpen());
    assertFalse(attempt1.isResponseShortCircuited());
    // failure 2 with a different command, same circuit breaker
    TestHystrixCommand<?> attempt2 = getSharedCircuitBreakerCommand(ExecutionIsolationStrategy.THREAD, circuitBreaker);
    attempt2.execute();
    Thread.sleep(100);
    assertTrue(attempt2.isFailedExecution());
    assertTrue(attempt2.isResponseFromFallback());
    assertFalse(attempt2.isCircuitBreakerOpen());
    assertFalse(attempt2.isResponseShortCircuited());
    // failure 3 of the Hystrix, 2nd for this particular HystrixCommand
    TestHystrixCommand<?> attempt3 = getSharedCircuitBreakerCommand(ExecutionIsolationStrategy.THREAD, circuitBreaker);
    attempt3.execute();
    Thread.sleep(100);
    assertTrue(attempt3.isFailedExecution());
    assertTrue(attempt3.isResponseFromFallback());
    assertFalse(attempt3.isResponseShortCircuited());
    // it should now be 'open' and prevent further executions
    // after having 3 failures on the Hystrix that these 2 different HystrixCommand objects are for
    assertTrue(attempt3.isCircuitBreakerOpen());
    // attempt 4
    TestHystrixCommand<?> attempt4 = getSharedCircuitBreakerCommand(ExecutionIsolationStrategy.THREAD, circuitBreaker);
    attempt4.execute();
    Thread.sleep(100);
    assertTrue(attempt4.isResponseFromFallback());
    // this should now be true as the response will be short-circuited
    assertTrue(attempt4.isResponseShortCircuited());
    // this should remain open
    assertTrue(attempt4.isCircuitBreakerOpen());
    assertSaneHystrixRequestLog(4);
    assertCommandExecutionEvents(attempt1, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(attempt2, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(attempt3, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(attempt4, HystrixEventType.SHORT_CIRCUITED, HystrixEventType.FALLBACK_SUCCESS);
}
/**
 * Disabling the circuit-breaker must not break execution: the command still
 * succeeds and metrics/events are still recorded.
 */
@Test
public void testExecutionSuccessWithCircuitBreakerDisabled() {
    TestHystrixCommand<?> cmd = getCircuitBreakerDisabledCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS);
    try {
        assertEquals(FlexibleTestHystrixCommand.EXECUTE_VALUE, cmd.execute());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We received an exception.");
    }
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
    // we'll still get metrics ... just not the circuit breaker opening/closing
    assertCommandExecutionEvents(cmd, HystrixEventType.SUCCESS);
}
/**
 * A 200ms run() with a 50ms timeout and no fallback must throw a
 * HystrixRuntimeException whose fallback exception is UnsupportedOperationException
 * and whose cause is the TimeoutException.
 */
@Test
public void testExecutionTimeoutWithNoFallback() {
    TestHystrixCommand<?> cmd = getLatentCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED, 50);
    try {
        cmd.execute();
        fail("we shouldn't get here");
    } catch (Exception e) {
        if (e instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e;
            assertNotNull(hre.getFallbackException());
            assertTrue(hre.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(hre.getImplementingClass());
            assertNotNull(hre.getCause());
            assertTrue(hre.getCause() instanceof TimeoutException);
        } else {
            fail("the exception should be HystrixRuntimeException");
        }
    }
    // the time should be 50+ since we timeout at 50ms
    assertTrue("Execution Time is: " + cmd.getExecutionTimeInMilliseconds(), cmd.getExecutionTimeInMilliseconds() >= 50);
    assertTrue(cmd.isResponseTimedOut());
    assertFalse(cmd.isResponseFromFallback());
    assertFalse(cmd.isResponseRejected());
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A 200ms run() with a 50ms timeout and a working fallback must return the
 * fallback value and record TIMEOUT + FALLBACK_SUCCESS.
 */
@Test
public void testExecutionTimeoutWithFallback() {
    TestHystrixCommand<?> cmd = getLatentCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.SUCCESS, 50);
    try {
        assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, cmd.execute());
        // the time should be 50+ since we timeout at 50ms
        assertTrue("Execution Time is: " + cmd.getExecutionTimeInMilliseconds(), cmd.getExecutionTimeInMilliseconds() >= 50);
        assertFalse(cmd.isCircuitBreakerOpen());
        assertFalse(cmd.isResponseShortCircuited());
        assertTrue(cmd.isResponseTimedOut());
        assertTrue(cmd.isResponseFromFallback());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A timeout whose fallback also fails must throw a HystrixRuntimeException
 * whose fallback exception is the fallback's own error (not
 * UnsupportedOperationException) and whose cause is the TimeoutException.
 */
@Test
public void testExecutionTimeoutFallbackFailure() {
    TestHystrixCommand<?> cmd = getLatentCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.FAILURE, 50);
    try {
        cmd.execute();
        fail("we shouldn't get here");
    } catch (Exception e) {
        if (e instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e;
            assertNotNull(hre.getFallbackException());
            assertFalse(hre.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(hre.getImplementingClass());
            assertNotNull(hre.getCause());
            assertTrue(hre.getCause() instanceof TimeoutException);
        } else {
            fail("the exception should be HystrixRuntimeException");
        }
    }
    assertNotNull(cmd.getExecutionException());
    // the time should be 50+ since we timeout at 50ms
    assertTrue("Execution Time is: " + cmd.getExecutionTimeInMilliseconds(), cmd.getExecutionTimeInMilliseconds() >= 50);
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_FAILURE);
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Test that the command finishing AFTER a timeout (because thread continues in background) does not register a SUCCESS
 */
@Test
public void testCountersOnExecutionTimeout() {
    // run() takes 200ms but the timeout is 50ms, so the timeout fires first
    TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.SUCCESS, 50);
    try {
        command.execute();
        /* wait long enough for the command to have finished */
        Thread.sleep(200);
        /* response should still be the same as 'testCircuitBreakerOnExecutionTimeout' */
        assertTrue(command.isResponseFromFallback());
        assertFalse(command.isCircuitBreakerOpen());
        assertFalse(command.isResponseShortCircuited());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isResponseTimedOut());
        // even though run() eventually completed in the background,
        // the command must not be reported as a successful execution
        assertFalse(command.isSuccessfulExecution());
        assertNotNull(command.getExecutionException());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertCommandExecutionEvents(command, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS);
    assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A queued command must still be protected by the Hystrix timeout even when the
 * caller uses queue().get() without its own timeout; the failure arrives as an
 * ExecutionException whose cause is the HystrixRuntimeException.
 */
@Test
public void testQueuedExecutionTimeoutWithNoFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED, 50);
    try {
        cmd.queue().get();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        if (e instanceof ExecutionException && e.getCause() instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e.getCause();
            assertNotNull(hre.getFallbackException());
            assertTrue(hre.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(hre.getImplementingClass());
            assertNotNull(hre.getCause());
            assertTrue(hre.getCause() instanceof TimeoutException);
        } else {
            fail("the exception should be ExecutionException with cause as HystrixRuntimeException");
        }
    }
    assertTrue(cmd.getExecutionTimeInMilliseconds() > -1);
    assertTrue(cmd.isResponseTimedOut());
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A queued command that times out but has a working fallback must deliver the
 * fallback value through Future.get() and record TIMEOUT + FALLBACK_SUCCESS.
 */
@Test
public void testQueuedExecutionTimeoutWithFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.SUCCESS, 50);
    try {
        assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, cmd.queue().get());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * A queued command that times out and whose fallback also fails must surface an
 * ExecutionException caused by a HystrixRuntimeException whose fallback exception
 * is the fallback's own error and whose cause is the TimeoutException.
 */
@Test
public void testQueuedExecutionTimeoutFallbackFailure() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.FAILURE, 50);
    try {
        cmd.queue().get();
        fail("we shouldn't get here");
    } catch (Exception e) {
        if (e instanceof ExecutionException && e.getCause() instanceof HystrixRuntimeException) {
            HystrixRuntimeException hre = (HystrixRuntimeException) e.getCause();
            assertNotNull(hre.getFallbackException());
            assertFalse(hre.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(hre.getImplementingClass());
            assertNotNull(hre.getCause());
            assertTrue(hre.getCause() instanceof TimeoutException);
        } else {
            fail("the exception should be ExecutionException with cause as HystrixRuntimeException");
        }
    }
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_FAILURE);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Test an observed command execution timeout where the command didn't implement getFallback.
 * <p>
 * Unlike queue().get(), observe().toBlocking().single() throws the
 * HystrixRuntimeException directly (not wrapped in an ExecutionException);
 * its fallback exception is UnsupportedOperationException and its cause is
 * the TimeoutException.
 */
@Test
public void testObservedExecutionTimeoutWithNoFallback() {
    TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED, 50);
    try {
        command.observe().toBlocking().single();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        if (e instanceof HystrixRuntimeException) {
            HystrixRuntimeException de = (HystrixRuntimeException) e;
            assertNotNull(de.getFallbackException());
            assertTrue(de.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(de.getImplementingClass());
            assertNotNull(de.getCause());
            assertTrue(de.getCause() instanceof TimeoutException);
        } else {
            // message fixed: it previously claimed an ExecutionException was expected
            // (copy-paste from the queued variant), contradicting the instanceof check above
            fail("the exception should be HystrixRuntimeException");
        }
    }
    assertTrue(command.getExecutionTimeInMilliseconds() > -1);
    assertTrue(command.isResponseTimedOut());
    assertCommandExecutionEvents(command, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(command.getExecutionException());
    assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * An observed command that times out but has a working fallback must emit the
 * fallback value from observe().toBlocking().single() and record
 * TIMEOUT + FALLBACK_SUCCESS.
 */
@Test
public void testObservedExecutionTimeoutWithFallback() {
    TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.SUCCESS, 50);
    try {
        assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, cmd.observe().toBlocking().single());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS);
    assertNotNull(cmd.getExecutionException());
    assertEquals(0, cmd.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * Test an observed command execution timeout where the command implemented getFallback but it fails.
 * <p>
 * observe().toBlocking().single() throws the HystrixRuntimeException directly
 * (not wrapped in an ExecutionException); its fallback exception is the
 * fallback's own error and its cause is the TimeoutException.
 */
@Test
public void testObservedExecutionTimeoutFallbackFailure() {
    TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.FAILURE, 50);
    try {
        command.observe().toBlocking().single();
        fail("we shouldn't get here");
    } catch (Exception e) {
        if (e instanceof HystrixRuntimeException) {
            HystrixRuntimeException de = (HystrixRuntimeException) e;
            assertNotNull(de.getFallbackException());
            assertFalse(de.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(de.getImplementingClass());
            assertNotNull(de.getCause());
            assertTrue(de.getCause() instanceof TimeoutException);
        } else {
            // message fixed: it previously claimed an ExecutionException was expected
            // (copy-paste from the queued variant), contradicting the instanceof check above
            fail("the exception should be HystrixRuntimeException");
        }
    }
    assertCommandExecutionEvents(command, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_FAILURE);
    assertNotNull(command.getExecutionException());
    assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
@Test
public void testShortCircuitFallbackCounter() {
    // force the breaker open so both commands short-circuit straight to fallback
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker().setForceShortCircuit(true);
    KnownFailureTestCommandWithFallback first = new KnownFailureTestCommandWithFallback(circuitBreaker);
    first.execute();
    KnownFailureTestCommandWithFallback second = new KnownFailureTestCommandWithFallback(circuitBreaker);
    second.execute();
    // will be -1 because it never attempted execution
    assertEquals(-1, first.getExecutionTimeInMilliseconds());
    assertTrue(first.isResponseShortCircuited());
    assertFalse(first.isResponseTimedOut());
    assertNotNull(first.getExecutionException());
    assertCommandExecutionEvents(first, HystrixEventType.SHORT_CIRCUITED, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(second, HystrixEventType.SHORT_CIRCUITED, HystrixEventType.FALLBACK_SUCCESS);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Test when a command fails to get queued up in the threadpool where the command didn't implement getFallback.
 * <p>
 * We specifically want to protect against developers getting random thread exceptions and instead just correctly receiving HystrixRuntimeException when no fallback exists.
 */
@Test
public void testRejectedThreadWithNoFallback() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single worker thread with a small queue (argument 1 is the queue size)
    SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(1);
    // fill up the queue
    pool.queue.add(new Runnable() {
        @Override
        public void run() {
            System.out.println("**** queue filler1 ****");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    });
    Future<Boolean> f = null;
    TestCommandRejection command1 = null;
    TestCommandRejection command2 = null;
    try {
        // command1 occupies the single worker thread
        command1 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_NOT_IMPLEMENTED);
        f = command1.queue();
        // command2 cannot be accepted (pool busy, queue full) and has no fallback, so queue() must throw
        command2 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_NOT_IMPLEMENTED);
        command2.queue();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        System.out.println("command.getExecutionTimeInMilliseconds(): " + command2.getExecutionTimeInMilliseconds());
        // will be -1 because it never attempted execution
        assertTrue(command2.getExecutionTimeInMilliseconds() == -1);
        assertTrue(command2.isResponseRejected());
        assertFalse(command2.isResponseShortCircuited());
        assertFalse(command2.isResponseTimedOut());
        assertNotNull(command2.getExecutionException());
        if (e instanceof HystrixRuntimeException && e.getCause() instanceof RejectedExecutionException) {
            HystrixRuntimeException de = (HystrixRuntimeException) e;
            // with no fallback implemented, the fallback exception is UnsupportedOperationException
            assertNotNull(de.getFallbackException());
            assertTrue(de.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(de.getImplementingClass());
            assertNotNull(de.getCause());
            assertTrue(de.getCause() instanceof RejectedExecutionException);
        } else {
            fail("the exception should be HystrixRuntimeException with cause as RejectedExecutionException");
        }
    }
    try {
        // NOTE(review): if command1.queue() itself threw above, f is still null and this
        // f.get() would NPE, masking the original failure — confirm whether that matters here
        f.get();
    } catch (Exception e) {
        e.printStackTrace();
        fail("The first one should succeed.");
    }
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Test when a command fails to get queued up in the threadpool where the command implemented getFallback.
 * <p>
 * We specifically want to protect against developers getting random thread exceptions and instead just correctly receives a fallback.
 */
@Test
public void testRejectedThreadWithFallback() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single worker thread with a small queue (argument 1 is the queue size)
    SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(1);
    // fill up the queue
    pool.queue.add(new Runnable() {
        @Override
        public void run() {
            System.out.println("**** queue filler1 ****");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    });
    TestCommandRejection command1 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_SUCCESS);
    TestCommandRejection command2 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_SUCCESS);
    Future<?> f1 = null;
    try {
        f1 = command1.queue();
        // command2 is rejected by the saturated pool and should return its fallback value (false)
        assertEquals(false, command2.queue().get());
        assertFalse(command1.isResponseRejected());
        assertFalse(command1.isResponseFromFallback());
        assertNull(command1.getExecutionException());
        assertTrue(command2.isResponseRejected());
        assertTrue(command2.isResponseFromFallback());
        assertNotNull(command2.getExecutionException());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    assertCommandExecutionEvents(command1); //no events yet
    assertCommandExecutionEvents(command2, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_SUCCESS);
    assertEquals(1, circuitBreaker.metrics.getCurrentConcurrentExecutionCount()); //pool-filler still going
    //This is a case where we knowingly walk away from an executing Hystrix thread (the pool-filler). It should have an in-flight status ("Executed"). You should avoid this in a production environment
    HystrixRequestLog requestLog = HystrixRequestLog.getCurrentRequest();
    assertEquals(2, requestLog.getAllExecutedCommands().size());
    assertTrue(requestLog.getExecutedCommandsAsString().contains("Executed"));
    try {
        //block on the outstanding work, so we don't inadvertently affect any other tests
        f1.get();
    } catch (Exception ex) {
        fail("Exception while blocking on Future");
    }
}
/**
 * Test when a command fails to get queued up in the threadpool where the command implemented getFallback but it fails.
 * <p>
 * We specifically want to protect against developers getting random thread exceptions and instead just correctly receives an HystrixRuntimeException.
 */
@Test
public void testRejectedThreadWithFallbackFailure() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single worker thread with a small queue (argument 1 is the queue size)
    SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(1);
    // fill up the queue
    pool.queue.add(new Runnable() {
        @Override
        public void run() {
            System.out.println("**** queue filler1 ****");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    });
    TestCommandRejection command1 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_FAILURE);
    TestCommandRejection command2 = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_FAILURE);
    Future<?> f1 = null;
    try {
        f1 = command1.queue();
        // command2 is rejected and its fallback itself fails, so get() must throw
        assertEquals(false, command2.queue().get());
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        if (e instanceof HystrixRuntimeException && e.getCause() instanceof RejectedExecutionException) {
            HystrixRuntimeException de = (HystrixRuntimeException) e;
            // the fallback ran but failed, so this is NOT the "missing fallback" UnsupportedOperationException
            assertNotNull(de.getFallbackException());
            assertFalse(de.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(de.getImplementingClass());
            assertNotNull(de.getCause());
            assertTrue(de.getCause() instanceof RejectedExecutionException);
        } else {
            fail("the exception should be HystrixRuntimeException with cause as RejectedExecutionException");
        }
    }
    assertCommandExecutionEvents(command1); //still in-flight, no events yet
    assertCommandExecutionEvents(command2, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_FAILURE);
    assertEquals(1, circuitBreaker.metrics.getCurrentConcurrentExecutionCount()); //pool-filler still going
    //This is a case where we knowingly walk away from an executing Hystrix thread (the pool-filler). It should have an in-flight status ("Executed"). You should avoid this in a production environment
    HystrixRequestLog requestLog = HystrixRequestLog.getCurrentRequest();
    assertEquals(2, requestLog.getAllExecutedCommands().size());
    assertTrue(requestLog.getExecutedCommandsAsString().contains("Executed"));
    try {
        //block on the outstanding work, so we don't inadvertently affect any other tests
        f1.get();
    } catch (Exception ex) {
        fail("Exception while blocking on Future");
    }
}
/**
 * Test that we can reject a thread using isQueueSpaceAvailable() instead of just when the pool rejects.
 * <p>
 * For example, we have queue size set to 100 but want to reject when we hit 10.
 * <p>
 * This allows us to use FastProperties to control our rejection point whereas we can't resize a queue after it's created.
 */
@Test
public void testRejectedThreadUsingQueueSize() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // queue sized 10 with a rejection threshold of 1 (presumably — confirm against SingleThreadedPoolWithQueue)
    SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(10, 1);
    // put 1 item in the queue
    // the thread pool won't pick it up because we're bypassing the pool and adding to the queue directly so this will keep the queue full
    pool.queue.add(new Runnable() {
        @Override
        public void run() {
            System.out.println("**** queue filler1 ****");
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    });
    TestCommandRejection command = new TestCommandRejection(circuitBreaker, pool, 500, 600, TestCommandRejection.FALLBACK_NOT_IMPLEMENTED);
    try {
        // this should fail as we already have 1 in the queue
        command.queue();
        fail("we shouldn't get here");
    } catch (Exception e) {
        e.printStackTrace();
        // will be -1 because it never attempted execution
        assertTrue(command.getExecutionTimeInMilliseconds() == -1);
        assertTrue(command.isResponseRejected());
        assertFalse(command.isResponseShortCircuited());
        assertFalse(command.isResponseTimedOut());
        assertNotNull(command.getExecutionException());
        if (e instanceof HystrixRuntimeException && e.getCause() instanceof RejectedExecutionException) {
            HystrixRuntimeException de = (HystrixRuntimeException) e;
            // with no fallback implemented, the fallback exception is UnsupportedOperationException
            assertNotNull(de.getFallbackException());
            assertTrue(de.getFallbackException() instanceof UnsupportedOperationException);
            assertNotNull(de.getImplementingClass());
            assertNotNull(de.getCause());
            assertTrue(de.getCause() instanceof RejectedExecutionException);
        } else {
            fail("the exception should be HystrixRuntimeException with cause as RejectedExecutionException");
        }
    }
    assertCommandExecutionEvents(command, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
/**
 * If it has been sitting in the queue, it should not execute if timed out by the time it hits the queue.
 */
@Test
public void testTimedOutCommandDoesNotExecute() {
    SingleThreadedPoolWithQueue pool = new SingleThreadedPoolWithQueue(5);
    TestCircuitBreaker s1 = new TestCircuitBreaker();
    TestCircuitBreaker s2 = new TestCircuitBreaker();
    // c1: execution takes 500ms with a 600ms execution timeout, so it should complete
    CommandWithCustomThreadPool c1 = new CommandWithCustomThreadPool(s1, pool, 500, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(600));
    // c2: execution would take 200ms, but its 20ms execution timeout fires while it waits behind c1
    CommandWithCustomThreadPool c2 = new CommandWithCustomThreadPool(s2, pool, 200, HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(20));
    // queue up c1 first
    Future<Boolean> c1f = c1.queue();
    // now queue up c2 and wait on it
    boolean receivedException = false;
    try {
        c2.queue().get();
    } catch (Exception e) {
        // we expect to get an exception here
        receivedException = true;
    }
    if (!receivedException) {
        fail("We expect to receive an exception for c2 as it's supposed to timeout.");
    }
    // c1 should complete normally after ~500ms
    try {
        c1f.get();
    } catch (Exception e1) {
        e1.printStackTrace();
        fail("we should not have failed while getting c1");
    }
    assertTrue("c1 is expected to executed but didn't", c1.didExecute);
    // c2 will timeout after 20 ms ... we'll wait longer than the 200ms time to make sure
    // the thread doesn't keep running in the background and execute
    try {
        Thread.sleep(400);
    } catch (Exception e) {
        throw new RuntimeException("Failed to sleep");
    }
    assertFalse("c2 is not expected to execute, but did", c2.didExecute);
    assertCommandExecutionEvents(c1, HystrixEventType.SUCCESS);
    assertEquals(0, s1.metrics.getCurrentConcurrentExecutionCount());
    assertCommandExecutionEvents(c2, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
    assertEquals(0, s2.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
@Test
public void testDisabledTimeoutWorks() {
    // Command sleeps 900ms while the (disabled) timeout would have been 100ms.
    CommandWithDisabledTimeout command = new CommandWithDisabledTimeout(100, 900);
    boolean succeeded = false;
    try {
        succeeded = command.execute();
    } catch (Throwable ex) {
        ex.printStackTrace();
        fail("should not fail");
    }
    // With the timeout disabled, the long-running execution completes normally.
    assertTrue(succeeded);
    assertFalse(command.isResponseTimedOut());
    assertNull(command.getExecutionException());
    System.out.println("CMD : " + command.currentRequestLog.getExecutedCommandsAsString());
    // The full 900ms of work must actually have run.
    assertTrue(command.executionResult.getExecutionTime() >= 900);
    assertCommandExecutionEvents(command, HystrixEventType.SUCCESS);
}
/**
 * Test that the fallback semaphore rejects a concurrent fallback: command3's
 * fallback ends in FALLBACK_REJECTION while command2's slow (800ms) fallback
 * is still running.
 */
@Test
public void testFallbackSemaphore() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single thread should work
    TestSemaphoreCommandWithSlowFallback command1 = null;
    try {
        command1 = new TestSemaphoreCommandWithSlowFallback(circuitBreaker, 1, 200);
        boolean result = command1.queue().get();
        assertTrue(result);
    } catch (Exception e) {
        // we shouldn't fail on this one
        throw new RuntimeException(e);
    }
    // 2 threads, the second should be rejected by the fallback semaphore
    boolean exceptionReceived = false;
    Future<Boolean> result2 = null;
    TestSemaphoreCommandWithSlowFallback command2 = null;
    TestSemaphoreCommandWithSlowFallback command3 = null;
    try {
        System.out.println("c2 start: " + System.currentTimeMillis());
        command2 = new TestSemaphoreCommandWithSlowFallback(circuitBreaker, 1, 800);
        result2 = command2.queue();
        System.out.println("c2 after queue: " + System.currentTimeMillis());
        // make sure that thread gets a chance to run before queuing the next one
        Thread.sleep(50);
        System.out.println("c3 start: " + System.currentTimeMillis());
        command3 = new TestSemaphoreCommandWithSlowFallback(circuitBreaker, 1, 200);
        Future<Boolean> result3 = command3.queue();
        System.out.println("c3 after queue: " + System.currentTimeMillis());
        // command3's fallback should be rejected while command2's fallback holds the permit
        result3.get();
    } catch (Exception e) {
        e.printStackTrace();
        exceptionReceived = true;
    }
    try {
        // command2's slow fallback still completes successfully
        assertTrue(result2.get());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    if (!exceptionReceived) {
        fail("We expected an exception on the 2nd get");
    }
    assertCommandExecutionEvents(command1, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(command3, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_REJECTION);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(3);
}
/**
 * Test that a shared execution semaphore with a single permit rejects the second
 * concurrent command when using queue(): command3 ends with SEMAPHORE_REJECTED
 * and, having no fallback, surfaces an exception to the caller.
 */
@Test
public void testExecutionSemaphoreWithQueue() throws InterruptedException {
    final TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single thread should work
    TestSemaphoreCommand command1 = null;
    try {
        command1 = new TestSemaphoreCommand(circuitBreaker, 1, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
        boolean result = command1.queue().get();
        assertTrue(result);
    } catch (Exception e) {
        // we shouldn't fail on this one
        throw new RuntimeException(e);
    }
    final AtomicBoolean exceptionReceived = new AtomicBoolean();
    // one permit, shared by command2 and command3
    final TryableSemaphore semaphore =
            new TryableSemaphoreActual(HystrixProperty.Factory.asProperty(1));
    final TestSemaphoreCommand command2 = new TestSemaphoreCommand(circuitBreaker, semaphore, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
    Runnable r2 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                command2.queue().get();
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    final TestSemaphoreCommand command3 = new TestSemaphoreCommand(circuitBreaker, semaphore, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
    Runnable r3 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                command3.queue().get();
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    // 2 threads, the second should be rejected by the semaphore
    Thread t2 = new Thread(r2);
    Thread t3 = new Thread(r3);
    t2.start();
    // make sure that t2 gets a chance to run before queuing the next one
    Thread.sleep(50);
    t3.start();
    try {
        t2.join();
        t3.join();
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed waiting on threads");
    }
    if (!exceptionReceived.get()) {
        fail("We expected an exception on the 2nd get");
    }
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command3, HystrixEventType.SEMAPHORE_REJECTED, HystrixEventType.FALLBACK_MISSING);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(3);
}
/**
 * Test that a shared execution semaphore with a single permit rejects the second
 * concurrent command when using execute(): only one result is produced and
 * command3 ends with SEMAPHORE_REJECTED.
 */
@Test
public void testExecutionSemaphoreWithExecution() throws InterruptedException {
    final TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // single thread should work
    TestSemaphoreCommand command1 = new TestSemaphoreCommand(circuitBreaker, 1, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
    boolean result = command1.execute();
    // semaphore-isolated commands run on the calling thread
    assertFalse(command1.isExecutedInThread());
    assertTrue(result);
    final ArrayBlockingQueue<Boolean> results = new ArrayBlockingQueue<Boolean>(2);
    final AtomicBoolean exceptionReceived = new AtomicBoolean();
    // one permit, shared by command2 and command3
    final TryableSemaphore semaphore =
            new TryableSemaphoreActual(HystrixProperty.Factory.asProperty(1));
    final TestSemaphoreCommand command2 = new TestSemaphoreCommand(circuitBreaker, semaphore, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
    Runnable r2 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command2.execute());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    final TestSemaphoreCommand command3 = new TestSemaphoreCommand(circuitBreaker, semaphore, 200, TestSemaphoreCommand.RESULT_SUCCESS, TestSemaphoreCommand.FALLBACK_NOT_IMPLEMENTED);
    Runnable r3 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command3.execute());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    // 2 threads, the second should be rejected by the semaphore
    Thread t2 = new Thread(r2);
    Thread t3 = new Thread(r3);
    t2.start();
    // make sure that t2 gets a chance to run before queuing the next one
    Thread.sleep(50);
    t3.start();
    try {
        t2.join();
        t3.join();
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed waiting on threads");
    }
    if (!exceptionReceived.get()) {
        fail("We expected an exception on the 2nd get");
    }
    // only 1 value is expected as the other should have thrown an exception
    assertEquals(1, results.size());
    // should contain only a true result
    assertTrue(results.contains(Boolean.TRUE));
    assertFalse(results.contains(Boolean.FALSE));
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command3, HystrixEventType.SEMAPHORE_REJECTED, HystrixEventType.FALLBACK_MISSING);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(3);
}
/**
 * Test that a command rejected by the execution semaphore returns its fallback when
 * invoked via execute(): the second command records SEMAPHORE_REJECTED +
 * FALLBACK_SUCCESS and the caller receives the fallback value (false), not an exception.
 */
@Test
public void testRejectedExecutionSemaphoreWithFallbackViaExecute() throws InterruptedException {
    final TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    final ArrayBlockingQueue<Boolean> results = new ArrayBlockingQueue<Boolean>(2);
    final AtomicBoolean exceptionReceived = new AtomicBoolean();
    final TestSemaphoreCommandWithFallback command1 = new TestSemaphoreCommandWithFallback(circuitBreaker, 1, 200, false);
    Runnable r1 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command1.execute());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    final TestSemaphoreCommandWithFallback command2 = new TestSemaphoreCommandWithFallback(circuitBreaker, 1, 200, false);
    Runnable r2 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command2.execute());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    // 2 threads, the second should be rejected by the semaphore and return fallback
    Thread t1 = new Thread(r1);
    Thread t2 = new Thread(r2);
    t1.start();
    // make sure that t1 gets a chance to run before starting the next one
    Thread.sleep(50);
    t2.start();
    try {
        t1.join();
        t2.join();
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed waiting on threads");
    }
    if (exceptionReceived.get()) {
        fail("We should have received a fallback response");
    }
    // both threads should have returned values
    assertEquals(2, results.size());
    // should contain both a true and false result
    assertTrue(results.contains(Boolean.TRUE));
    assertTrue(results.contains(Boolean.FALSE));
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SEMAPHORE_REJECTED, HystrixEventType.FALLBACK_SUCCESS);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Test that a command rejected by the execution semaphore returns its fallback when
 * invoked via observe(): merging both Observables yields one true and one false value,
 * and the rejected command records SEMAPHORE_REJECTED + FALLBACK_SUCCESS.
 */
@Test
public void testRejectedExecutionSemaphoreWithFallbackViaObserve() throws InterruptedException {
    final TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    final ArrayBlockingQueue<Observable<Boolean>> results = new ArrayBlockingQueue<Observable<Boolean>>(2);
    final AtomicBoolean exceptionReceived = new AtomicBoolean();
    final TestSemaphoreCommandWithFallback command1 = new TestSemaphoreCommandWithFallback(circuitBreaker, 1, 200, false);
    Runnable r1 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command1.observe());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    final TestSemaphoreCommandWithFallback command2 = new TestSemaphoreCommandWithFallback(circuitBreaker, 1, 200, false);
    Runnable r2 = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        @Override
        public void run() {
            try {
                results.add(command2.observe());
            } catch (Exception e) {
                e.printStackTrace();
                exceptionReceived.set(true);
            }
        }
    });
    // 2 threads, the second should be rejected by the semaphore and return fallback
    Thread t1 = new Thread(r1);
    Thread t2 = new Thread(r2);
    t1.start();
    // make sure that t1 gets a chance to run before starting the next one
    Thread.sleep(50);
    t2.start();
    try {
        t1.join();
        t2.join();
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed waiting on threads");
    }
    if (exceptionReceived.get()) {
        fail("We should have received a fallback response");
    }
    final List<Boolean> blockingList = Observable.merge(results).toList().toBlocking().single();
    // both threads should have returned values
    assertEquals(2, blockingList.size());
    // should contain both a true and false result
    assertTrue(blockingList.contains(Boolean.TRUE));
    assertTrue(blockingList.contains(Boolean.FALSE));
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SEMAPHORE_REJECTED, HystrixEventType.FALLBACK_SUCCESS);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Tests that semaphores are counted separately for commands with unique keys.
 * <p>
 * Runs 2x the permitted number of commands against a shared semaphore (so half are
 * expected to be rejected) plus one command against an isolated semaphore, verifying
 * the in-use permit counts before, during, and after execution.
 */
@Test
public void testSemaphorePermitsInUse() {
    final TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // this semaphore will be shared across multiple command instances
    final TryableSemaphoreActual sharedSemaphore =
            new TryableSemaphoreActual(HystrixProperty.Factory.asProperty(3));
    // used to wait until all commands have started
    final CountDownLatch startLatch = new CountDownLatch((sharedSemaphore.numberOfPermits.get() * 2) + 1);
    // used to signal that all command can finish
    final CountDownLatch sharedLatch = new CountDownLatch(1);
    // tracks failures to obtain semaphores
    final AtomicInteger failureCount = new AtomicInteger();
    final Runnable sharedSemaphoreRunnable = new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        public void run() {
            try {
                new LatchedSemaphoreCommand(circuitBreaker, sharedSemaphore, startLatch, sharedLatch).execute();
            } catch (Exception e) {
                // rejected commands never count down via the command, so do it here
                startLatch.countDown();
                e.printStackTrace();
                failureCount.incrementAndGet();
            }
        }
    });
    // creates group of threads each using command sharing a single semaphore
    // I create extra threads and commands so that I can verify that some of them fail to obtain a semaphore
    final int sharedThreadCount = sharedSemaphore.numberOfPermits.get() * 2;
    final Thread[] sharedSemaphoreThreads = new Thread[sharedThreadCount];
    for (int i = 0; i < sharedThreadCount; i++) {
        sharedSemaphoreThreads[i] = new Thread(sharedSemaphoreRunnable);
    }
    // creates thread using isolated semaphore
    final TryableSemaphoreActual isolatedSemaphore =
            new TryableSemaphoreActual(HystrixProperty.Factory.asProperty(1));
    final CountDownLatch isolatedLatch = new CountDownLatch(1);
    final Thread isolatedThread = new Thread(new HystrixContextRunnable(HystrixPlugins.getInstance().getConcurrencyStrategy(), new Runnable() {
        public void run() {
            try {
                new LatchedSemaphoreCommand(circuitBreaker, isolatedSemaphore, startLatch, isolatedLatch).execute();
            } catch (Exception e) {
                startLatch.countDown();
                e.printStackTrace();
                failureCount.incrementAndGet();
            }
        }
    }));
    // verifies no permits in use before starting threads
    assertEquals("before threads start, shared semaphore should be unused", 0, sharedSemaphore.getNumberOfPermitsUsed());
    assertEquals("before threads start, isolated semaphore should be unused", 0, isolatedSemaphore.getNumberOfPermitsUsed());
    for (int i = 0; i < sharedThreadCount; i++) {
        sharedSemaphoreThreads[i].start();
    }
    isolatedThread.start();
    // waits until all commands have started
    try {
        // fail fast here (rather than via a confusing permit-count assertion below)
        // if the commands never all start within the timeout
        assertTrue("all commands should start within the timeout",
                startLatch.await(1000, TimeUnit.MILLISECONDS));
    } catch (InterruptedException e) {
        throw new RuntimeException(e);
    }
    // verifies that all semaphores are in use
    assertEquals("immediately after command start, all shared semaphores should be in-use",
            sharedSemaphore.numberOfPermits.get().longValue(), sharedSemaphore.getNumberOfPermitsUsed());
    assertEquals("immediately after command start, isolated semaphore should be in-use",
            isolatedSemaphore.numberOfPermits.get().longValue(), isolatedSemaphore.getNumberOfPermitsUsed());
    // signals commands to finish
    sharedLatch.countDown();
    isolatedLatch.countDown();
    try {
        for (int i = 0; i < sharedThreadCount; i++) {
            sharedSemaphoreThreads[i].join();
        }
        isolatedThread.join();
    } catch (Exception e) {
        e.printStackTrace();
        fail("failed waiting on threads");
    }
    // verifies no permits in use after finishing threads
    assertEquals("after all threads have finished, no shared semaphores should be in-use", 0, sharedSemaphore.getNumberOfPermitsUsed());
    assertEquals("after all threads have finished, isolated semaphore not in-use", 0, isolatedSemaphore.getNumberOfPermitsUsed());
    // verifies that some executions failed
    assertEquals("expected some of shared semaphore commands to get rejected", sharedSemaphore.numberOfPermits.get().longValue(), failureCount.get());
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
}
/**
 * Test that HystrixOwner can be passed in dynamically.
 */
@Test
public void testDynamicOwner() {
    try {
        // construct with a dynamically supplied group/owner and verify normal execution
        TestHystrixCommand<Boolean> cmd = new DynamicOwnerTestCommand(InspectableBuilder.CommandGroupForUnitTest.OWNER_ONE);
        assertTrue(cmd.execute());
        assertCommandExecutionEvents(cmd, HystrixEventType.SUCCESS);
    } catch (Exception e) {
        e.printStackTrace();
        fail("We received an exception.");
    }
}
/**
 * Test a successful command execution.
 */
@Test
public void testDynamicOwnerFails() {
    boolean threw = false;
    try {
        // an owner is mandatory, so either construction or execution must throw
        TestHystrixCommand<Boolean> cmd = new DynamicOwnerTestCommand(null);
        assertEquals(true, cmd.execute());
    } catch (Exception expected) {
        threw = true;
    }
    if (!threw) {
        fail("we should have thrown an exception as we need an owner");
    }
}
/**
 * Test that HystrixCommandKey can be passed in dynamically.
 */
@Test
public void testDynamicKey() {
    try {
        DynamicOwnerAndKeyTestCommand cmdKeyOne = new DynamicOwnerAndKeyTestCommand(InspectableBuilder.CommandGroupForUnitTest.OWNER_ONE, InspectableBuilder.CommandKeyForUnitTest.KEY_ONE);
        assertTrue(cmdKeyOne.execute());
        DynamicOwnerAndKeyTestCommand cmdKeyTwo = new DynamicOwnerAndKeyTestCommand(InspectableBuilder.CommandGroupForUnitTest.OWNER_ONE, InspectableBuilder.CommandKeyForUnitTest.KEY_TWO);
        assertTrue(cmdKeyTwo.execute());
        // distinct command keys must map to distinct circuit breakers
        assertNotSame(cmdKeyOne.getCircuitBreaker(), cmdKeyTwo.getCircuitBreaker());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We received an exception.");
    }
}
/**
 * Test Request scoped caching of commands so that a 2nd duplicate call doesn't execute but returns the previous Future
 */
@Test
public void testRequestCache1() {
    TestCircuitBreaker breaker = new TestCircuitBreaker();
    // two commands with the same cache key "A" — only one should actually run
    SuccessfulCacheableCommand<String> first = new SuccessfulCacheableCommand<String>(breaker, true, "A");
    SuccessfulCacheableCommand<String> second = new SuccessfulCacheableCommand<String>(breaker, true, "A");
    assertTrue(first.isCommandRunningInThread());

    Future<String> firstFuture = first.queue();
    Future<String> secondFuture = second.queue();
    try {
        assertEquals("A", firstFuture.get());
        assertEquals("A", secondFuture.get());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    // only the first command executed; the duplicate was served from the request cache
    assertTrue(first.executed);
    assertFalse(second.executed);
    assertTrue(first.getExecutionTimeInMilliseconds() > -1);
    assertFalse(first.isResponseFromCache());
    assertTrue(second.getExecutionTimeInMilliseconds() == -1);
    assertTrue(second.isResponseFromCache());

    assertCommandExecutionEvents(first, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(second, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
    assertEquals(0, breaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Test Request scoped caching doesn't prevent different ones from executing
 */
@Test
public void testRequestCache2() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // different cache keys ("A" vs "B"), so caching must NOT suppress the second execution
    SuccessfulCacheableCommand<String> command1 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A");
    SuccessfulCacheableCommand<String> command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "B");
    assertTrue(command1.isCommandRunningInThread());
    Future<String> f1 = command1.queue();
    Future<String> f2 = command2.queue();
    try {
        assertEquals("A", f1.get());
        assertEquals("B", f2.get());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    assertTrue(command1.executed);
    // both should execute as they are different
    assertTrue(command2.executed);
    assertTrue(command2.getExecutionTimeInMilliseconds() > -1);
    assertFalse(command2.isResponseFromCache());
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
    assertNull(command1.getExecutionException());
    assertNull(command2.getExecutionException());
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
/**
 * Test Request scoped caching with a mixture of commands
 */
@Test
public void testRequestCache3() {
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    // command1 and command3 share cache key "A"; command2 uses "B"
    SuccessfulCacheableCommand<String> command1 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A");
    SuccessfulCacheableCommand<String> command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "B");
    SuccessfulCacheableCommand<String> command3 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "A");
    assertTrue(command1.isCommandRunningInThread());
    Future<String> f1 = command1.queue();
    Future<String> f2 = command2.queue();
    Future<String> f3 = command3.queue();
    try {
        assertEquals("A", f1.get());
        assertEquals("B", f2.get());
        assertEquals("A", f3.get());
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
    assertTrue(command1.executed);
    // both should execute as they are different
    assertTrue(command2.executed);
    // but the 3rd should come from cache
    assertFalse(command3.executed);
    assertTrue(command3.getExecutionTimeInMilliseconds() == -1);
    assertTrue(command3.isResponseFromCache());
    assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
    assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(3);
}
    /**
     * Test Request scoped caching of commands so that a 2nd duplicate call doesn't execute but returns the previous Future.
     * The duplicates are queued while the original is still running, so their Futures must block until it completes.
     */
    @Test
    public void testRequestCacheWithSlowExecution() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        // command1 takes 200ms; the three duplicates are queued while it is still in flight
        SlowCacheableCommand command1 = new SlowCacheableCommand(circuitBreaker, "A", 200);
        SlowCacheableCommand command2 = new SlowCacheableCommand(circuitBreaker, "A", 100);
        SlowCacheableCommand command3 = new SlowCacheableCommand(circuitBreaker, "A", 100);
        SlowCacheableCommand command4 = new SlowCacheableCommand(circuitBreaker, "A", 100);
        Future<String> f1 = command1.queue();
        Future<String> f2 = command2.queue();
        Future<String> f3 = command3.queue();
        Future<String> f4 = command4.queue();
        try {
            // deliberately wait on the cached futures first: they must block until
            // the original (f1) finishes and then yield its value
            assertEquals("A", f2.get());
            assertEquals("A", f3.get());
            assertEquals("A", f4.get());
            assertEquals("A", f1.get());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        assertTrue(command1.executed);
        // the second one should not have executed as it should have received the cached value instead
        assertFalse(command2.executed);
        assertFalse(command3.executed);
        assertFalse(command4.executed);
        // only the original records an execution time; cache hits report -1
        assertTrue(command1.getExecutionTimeInMilliseconds() > -1);
        assertFalse(command1.isResponseFromCache());
        assertTrue(command2.getExecutionTimeInMilliseconds() == -1);
        assertTrue(command2.isResponseFromCache());
        assertTrue(command3.isResponseFromCache());
        assertTrue(command3.getExecutionTimeInMilliseconds() == -1);
        assertTrue(command4.isResponseFromCache());
        assertTrue(command4.getExecutionTimeInMilliseconds() == -1);
        assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
        assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(command4, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(4);
        System.out.println("HystrixRequestLog: " + HystrixRequestLog.getCurrentRequest().getExecutedCommandsAsString());
    }
/**
* Test Request scoped caching with a mixture of commands
*/
@Test
public void testNoRequestCache3() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
SuccessfulCacheableCommand<String> command1 = new SuccessfulCacheableCommand<String>(circuitBreaker, false, "A");
SuccessfulCacheableCommand<String> command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, false, "B");
SuccessfulCacheableCommand<String> command3 = new SuccessfulCacheableCommand<String>(circuitBreaker, false, "A");
assertTrue(command1.isCommandRunningInThread());
Future<String> f1 = command1.queue();
Future<String> f2 = command2.queue();
Future<String> f3 = command3.queue();
try {
assertEquals("A", f1.get());
assertEquals("B", f2.get());
assertEquals("A", f3.get());
} catch (Exception e) {
throw new RuntimeException(e);
}
assertTrue(command1.executed);
// both should execute as they are different
assertTrue(command2.executed);
// this should also execute since we disabled the cache
assertTrue(command3.executed);
assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS);
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(3);
}
/**
* Test Request scoped caching with a mixture of commands
*/
@Test
public void testRequestCacheViaQueueSemaphore1() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
SuccessfulCacheableCommandViaSemaphore command1 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "A");
SuccessfulCacheableCommandViaSemaphore command2 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "B");
SuccessfulCacheableCommandViaSemaphore command3 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "A");
assertFalse(command1.isCommandRunningInThread());
Future<String> f1 = command1.queue();
Future<String> f2 = command2.queue();
Future<String> f3 = command3.queue();
try {
assertEquals("A", f1.get());
assertEquals("B", f2.get());
assertEquals("A", f3.get());
} catch (Exception e) {
throw new RuntimeException(e);
}
assertTrue(command1.executed);
// both should execute as they are different
assertTrue(command2.executed);
// but the 3rd should come from cache
assertFalse(command3.executed);
assertTrue(command3.isResponseFromCache());
assertTrue(command3.getExecutionTimeInMilliseconds() == -1);
assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(3);
}
    /**
     * With request caching disabled under SEMAPHORE isolation via queue(),
     * the repeated "A" command executes again rather than hitting the cache.
     */
    @Test
    public void testNoRequestCacheViaQueueSemaphore1() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        // cacheEnabled=false, so the duplicate "A" below must execute again
        SuccessfulCacheableCommandViaSemaphore command1 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "A");
        SuccessfulCacheableCommandViaSemaphore command2 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "B");
        SuccessfulCacheableCommandViaSemaphore command3 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "A");
        // semaphore isolation: the command must not run on a separate thread
        assertFalse(command1.isCommandRunningInThread());
        Future<String> f1 = command1.queue();
        Future<String> f2 = command2.queue();
        Future<String> f3 = command3.queue();
        try {
            assertEquals("A", f1.get());
            assertEquals("B", f2.get());
            assertEquals("A", f3.get());
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        assertTrue(command1.executed);
        // both should execute as they are different
        assertTrue(command2.executed);
        // this should also execute because caching is disabled
        assertTrue(command3.executed);
        assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
        assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
        assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(3);
    }
/**
* Test Request scoped caching with a mixture of commands
*/
@Test
public void testRequestCacheViaExecuteSemaphore1() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
SuccessfulCacheableCommandViaSemaphore command1 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "A");
SuccessfulCacheableCommandViaSemaphore command2 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "B");
SuccessfulCacheableCommandViaSemaphore command3 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, true, "A");
assertFalse(command1.isCommandRunningInThread());
String f1 = command1.execute();
String f2 = command2.execute();
String f3 = command3.execute();
assertEquals("A", f1);
assertEquals("B", f2);
assertEquals("A", f3);
assertTrue(command1.executed);
// both should execute as they are different
assertTrue(command2.executed);
// but the 3rd should come from cache
assertFalse(command3.executed);
assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(3);
}
    /**
     * With request caching disabled under SEMAPHORE isolation via execute(),
     * the repeated "A" command executes again rather than hitting the cache.
     */
    @Test
    public void testNoRequestCacheViaExecuteSemaphore1() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        SuccessfulCacheableCommandViaSemaphore command1 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "A");
        SuccessfulCacheableCommandViaSemaphore command2 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "B");
        SuccessfulCacheableCommandViaSemaphore command3 = new SuccessfulCacheableCommandViaSemaphore(circuitBreaker, false, "A");
        // semaphore isolation: the command must not run on a separate thread
        assertFalse(command1.isCommandRunningInThread());
        String f1 = command1.execute();
        String f2 = command2.execute();
        String f3 = command3.execute();
        assertEquals("A", f1);
        assertEquals("B", f2);
        assertEquals("A", f3);
        assertTrue(command1.executed);
        // both should execute as they are different
        assertTrue(command2.executed);
        // this should also execute because caching is disabled
        assertTrue(command3.executed);
        assertCommandExecutionEvents(command1, HystrixEventType.SUCCESS);
        assertCommandExecutionEvents(command2, HystrixEventType.SUCCESS);
        assertCommandExecutionEvents(command3, HystrixEventType.SUCCESS);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(3);
    }
    /**
     * With request caching disabled, each timed-out command (no fallback) must
     * fail independently; no RESPONSE_FROM_CACHE events appear.
     *
     * @throws Exception if interrupted while sleeping between executions
     */
    @Test
    public void testNoRequestCacheOnTimeoutThrowsException() throws Exception {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        NoRequestCacheTimeoutWithoutFallback r1 = new NoRequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            System.out.println("r1 value: " + r1.execute());
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r1.isResponseTimedOut());
            // what we want
        }
        NoRequestCacheTimeoutWithoutFallback r2 = new NoRequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            r2.execute();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r2.isResponseTimedOut());
            // what we want
        }
        NoRequestCacheTimeoutWithoutFallback r3 = new NoRequestCacheTimeoutWithoutFallback(circuitBreaker);
        Future<Boolean> f3 = r3.queue();
        try {
            f3.get();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (ExecutionException e) {
            // unlike execute(), Future.get() wraps the failure in an ExecutionException
            e.printStackTrace();
            assertTrue(r3.isResponseTimedOut());
            // what we want
        }
        Thread.sleep(500); // timeout on command is set to 200ms
        NoRequestCacheTimeoutWithoutFallback r4 = new NoRequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            r4.execute();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r4.isResponseTimedOut());
            assertFalse(r4.isResponseFromFallback());
            // what we want
        }
        // no RESPONSE_FROM_CACHE event anywhere: caching is disabled on this command
        assertCommandExecutionEvents(r1, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
        assertCommandExecutionEvents(r2, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
        assertCommandExecutionEvents(r3, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
        assertCommandExecutionEvents(r4, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(4);
    }
    /**
     * Regression test for cached timeout/fallback responses: a duplicate call
     * must receive the cached fallback value (false) as a real result, and
     * queue() must return a usable Future rather than null.
     *
     * @throws Exception if interrupted while sleeping or waiting on the Future
     */
    @Test
    public void testRequestCacheOnTimeoutCausesNullPointerException() throws Exception {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        RequestCacheNullPointerExceptionCase command1 = new RequestCacheNullPointerExceptionCase(circuitBreaker);
        RequestCacheNullPointerExceptionCase command2 = new RequestCacheNullPointerExceptionCase(circuitBreaker);
        RequestCacheNullPointerExceptionCase command3 = new RequestCacheNullPointerExceptionCase(circuitBreaker);
        // Expect it to time out - all results should be false
        assertFalse(command1.execute());
        assertFalse(command2.execute()); // return from cache #1
        assertFalse(command3.execute()); // return from cache #2
        Thread.sleep(500); // timeout on command is set to 200ms
        RequestCacheNullPointerExceptionCase command4 = new RequestCacheNullPointerExceptionCase(circuitBreaker);
        Boolean value = command4.execute(); // return from cache #3
        assertFalse(value);
        RequestCacheNullPointerExceptionCase command5 = new RequestCacheNullPointerExceptionCase(circuitBreaker);
        Future<Boolean> f = command5.queue(); // return from cache #4
        // the bug is that we're getting a null Future back, rather than a Future that returns false
        assertNotNull(f);
        assertFalse(f.get());
        // the cached response must carry the full metadata of the original timeout/fallback
        assertTrue(command5.isResponseFromFallback());
        assertTrue(command5.isResponseTimedOut());
        assertFalse(command5.isFailedExecution());
        assertFalse(command5.isResponseShortCircuited());
        assertNotNull(command5.getExecutionException());
        assertCommandExecutionEvents(command1, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS);
        assertCommandExecutionEvents(command2, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(command3, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(command4, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(command5, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_SUCCESS, HystrixEventType.RESPONSE_FROM_CACHE);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(5);
    }
    /**
     * With request caching enabled, the first timeout (no fallback) executes
     * and its failed response is replayed from the cache for the duplicates
     * (RESPONSE_FROM_CACHE events on r2-r4).
     *
     * @throws Exception if interrupted while sleeping between executions
     */
    @Test
    public void testRequestCacheOnTimeoutThrowsException() throws Exception {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        RequestCacheTimeoutWithoutFallback r1 = new RequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            System.out.println("r1 value: " + r1.execute());
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r1.isResponseTimedOut());
            // what we want
        }
        RequestCacheTimeoutWithoutFallback r2 = new RequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            r2.execute();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r2.isResponseTimedOut());
            // what we want
        }
        RequestCacheTimeoutWithoutFallback r3 = new RequestCacheTimeoutWithoutFallback(circuitBreaker);
        Future<Boolean> f3 = r3.queue();
        try {
            f3.get();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (ExecutionException e) {
            // unlike execute(), Future.get() wraps the failure in an ExecutionException
            e.printStackTrace();
            assertTrue(r3.isResponseTimedOut());
            // what we want
        }
        Thread.sleep(500); // timeout on command is set to 200ms
        RequestCacheTimeoutWithoutFallback r4 = new RequestCacheTimeoutWithoutFallback(circuitBreaker);
        try {
            r4.execute();
            // we should have thrown an exception
            fail("expected a timeout");
        } catch (HystrixRuntimeException e) {
            assertTrue(r4.isResponseTimedOut());
            assertFalse(r4.isResponseFromFallback());
            // what we want
        }
        assertCommandExecutionEvents(r1, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
        assertCommandExecutionEvents(r2, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(r3, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(r4, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(4);
    }
    /**
     * Thread-pool rejection combined with request caching: all four commands
     * observe a rejected response, and the duplicates (r2-r4) have the
     * rejection replayed from the request cache (RESPONSE_FROM_CACHE events)
     * rather than being submitted to the pool again.
     *
     * @throws Exception on unexpected interruption
     */
    @Test
    public void testRequestCacheOnThreadRejectionThrowsException() throws Exception {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        CountDownLatch completionLatch = new CountDownLatch(1);
        RequestCacheThreadRejectionWithoutFallback r1 = new RequestCacheThreadRejectionWithoutFallback(circuitBreaker, completionLatch);
        try {
            System.out.println("r1: " + r1.execute());
            // we should have thrown an exception
            fail("expected a rejection");
        } catch (HystrixRuntimeException e) {
            assertTrue(r1.isResponseRejected());
            // what we want
        }
        RequestCacheThreadRejectionWithoutFallback r2 = new RequestCacheThreadRejectionWithoutFallback(circuitBreaker, completionLatch);
        try {
            System.out.println("r2: " + r2.execute());
            // we should have thrown an exception
            fail("expected a rejection");
        } catch (HystrixRuntimeException e) {
            // e.printStackTrace();
            assertTrue(r2.isResponseRejected());
            // what we want
        }
        RequestCacheThreadRejectionWithoutFallback r3 = new RequestCacheThreadRejectionWithoutFallback(circuitBreaker, completionLatch);
        try {
            System.out.println("f3: " + r3.queue().get());
            // we should have thrown an exception
            fail("expected a rejection");
        } catch (HystrixRuntimeException e) {
            // e.printStackTrace();
            assertTrue(r3.isResponseRejected());
            // what we want
        }
        // let the command finish (only 1 should actually be blocked on this due to the response cache)
        completionLatch.countDown();
        // then another after the command has completed
        RequestCacheThreadRejectionWithoutFallback r4 = new RequestCacheThreadRejectionWithoutFallback(circuitBreaker, completionLatch);
        try {
            System.out.println("r4: " + r4.execute());
            // we should have thrown an exception
            fail("expected a rejection");
        } catch (HystrixRuntimeException e) {
            // e.printStackTrace();
            assertTrue(r4.isResponseRejected());
            assertFalse(r4.isResponseFromFallback());
            // what we want
        }
        assertCommandExecutionEvents(r1, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING);
        assertCommandExecutionEvents(r2, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(r3, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertCommandExecutionEvents(r4, HystrixEventType.THREAD_POOL_REJECTED, HystrixEventType.FALLBACK_MISSING, HystrixEventType.RESPONSE_FROM_CACHE);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(4);
    }
    /**
     * Test that we can do basic execution without a RequestVariable being initialized.
     */
    @Test
    public void testBasicExecutionWorksWithoutRequestVariable() {
        try {
            /* force the RequestVariable to not be initialized */
            HystrixRequestContext.setContextOnCurrentThread(null);
            TestHystrixCommand<Boolean> command = new SuccessfulTestCommand();
            assertEquals(true, command.execute());
            TestHystrixCommand<Boolean> command2 = new SuccessfulTestCommand();
            assertEquals(true, command2.queue().get());
            // we should be able to execute without a RequestVariable if ...
            // 1) We don't have a cacheKey
            // 2) We don't ask for the RequestLog
            // 3) We don't do collapsing
        } catch (Exception e) {
            // any exception at all fails the test - execution must succeed without a context
            e.printStackTrace();
            fail("We received an exception => " + e.getMessage());
        }
    }
/**
* Test that if we try and execute a command with a cacheKey without initializing RequestVariable that it gives an error.
*/
@Test
public void testCacheKeyExecutionRequiresRequestVariable() {
try {
/* force the RequestVariable to not be initialized */
HystrixRequestContext.setContextOnCurrentThread(null);
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
SuccessfulCacheableCommand command = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "one");
assertEquals("one", command.execute());
SuccessfulCacheableCommand command2 = new SuccessfulCacheableCommand<String>(circuitBreaker, true, "two");
assertEquals("two", command2.queue().get());
fail("We expect an exception because cacheKey requires RequestVariable.");
} catch (Exception e) {
e.printStackTrace();
}
}
    /**
     * Test that a BadRequestException can be thrown and not count towards errors and bypasses fallback.
     */
    @Test
    public void testBadRequestExceptionViaExecuteInThread() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        BadRequestCommand command1 = null;
        try {
            command1 = new BadRequestCommand(circuitBreaker, ExecutionIsolationStrategy.THREAD);
            command1.execute();
            fail("we expect to receive a " + HystrixBadRequestException.class.getSimpleName());
        } catch (HystrixBadRequestException e) {
            // success: execute() propagates the HystrixBadRequestException un-wrapped
            e.printStackTrace();
        } catch (Exception e) {
            e.printStackTrace();
            fail("We expect a " + HystrixBadRequestException.class.getSimpleName() + " but got a " + e.getClass().getSimpleName());
        }
        // recorded as BAD_REQUEST (no FAILURE, no fallback events)
        assertCommandExecutionEvents(command1, HystrixEventType.BAD_REQUEST);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
    /**
     * Test that a BadRequestException can be thrown and not count towards errors and bypasses fallback.
     */
    @Test
    public void testBadRequestExceptionViaQueueInThread() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        BadRequestCommand command1 = null;
        try {
            command1 = new BadRequestCommand(circuitBreaker, ExecutionIsolationStrategy.THREAD);
            command1.queue().get();
            fail("we expect to receive a " + HystrixBadRequestException.class.getSimpleName());
        } catch (ExecutionException e) {
            e.printStackTrace();
            // Future.get() wraps the HystrixBadRequestException; unwrap via getCause()
            if (e.getCause() instanceof HystrixBadRequestException) {
                // success
            } else {
                fail("We expect a " + HystrixBadRequestException.class.getSimpleName() + " but got a " + e.getClass().getSimpleName());
            }
        } catch (Exception e) {
            e.printStackTrace();
            fail();
        }
        // recorded as BAD_REQUEST (no FAILURE, no fallback events)
        assertCommandExecutionEvents(command1, HystrixEventType.BAD_REQUEST);
        assertNotNull(command1.getExecutionException());
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
    /**
     * Test that BadRequestException behavior works the same on a cached response.
     */
    @Test
    public void testBadRequestExceptionViaQueueInThreadOnResponseFromCache() {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        // execute once to cache the value
        BadRequestCommand command1 = null;
        try {
            command1 = new BadRequestCommand(circuitBreaker, ExecutionIsolationStrategy.THREAD);
            command1.execute();
        } catch (Throwable e) {
            // ignore
        }
        BadRequestCommand command2 = null;
        try {
            command2 = new BadRequestCommand(circuitBreaker, ExecutionIsolationStrategy.THREAD);
            command2.queue().get();
            fail("we expect to receive a " + HystrixBadRequestException.class.getSimpleName());
        } catch (ExecutionException e) {
            e.printStackTrace();
            // Future.get() wraps the cached exception; unwrap via getCause()
            if (e.getCause() instanceof HystrixBadRequestException) {
                // success
            } else {
                fail("We expect a " + HystrixBadRequestException.class.getSimpleName() + " but got a " + e.getClass().getSimpleName());
            }
        } catch (Exception e) {
            e.printStackTrace();
            fail();
        }
        // the cached replay still reports BAD_REQUEST, plus RESPONSE_FROM_CACHE
        assertCommandExecutionEvents(command1, HystrixEventType.BAD_REQUEST);
        assertCommandExecutionEvents(command2, HystrixEventType.BAD_REQUEST, HystrixEventType.RESPONSE_FROM_CACHE);
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(2);
    }
/**
* Test that a BadRequestException can be thrown and not count towards errors and bypasses fallback.
*/
@Test
public void testBadRequestExceptionViaExecuteInSemaphore() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
BadRequestCommand command1 = new BadRequestCommand(circuitBreaker, ExecutionIsolationStrategy.SEMAPHORE);
try {
command1.execute();
fail("we expect to receive a " + HystrixBadRequestException.class.getSimpleName());
} catch (HystrixBadRequestException e) {
// success
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
fail("We expect a " + HystrixBadRequestException.class.getSimpleName() + " but got a " + e.getClass().getSimpleName());
}
assertCommandExecutionEvents(command1, HystrixEventType.BAD_REQUEST);
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(1);
}
/**
* Test a checked Exception being thrown
*/
@Test
public void testCheckedExceptionViaExecute() {
TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
CommandWithCheckedException command = new CommandWithCheckedException(circuitBreaker);
try {
command.execute();
fail("we expect to receive a " + Exception.class.getSimpleName());
} catch (Exception e) {
assertEquals("simulated checked exception message", e.getCause().getMessage());
}
assertEquals("simulated checked exception message", command.getFailedExecutionException().getMessage());
assertTrue(command.getExecutionTimeInMilliseconds() > -1);
assertTrue(command.isFailedExecution());
assertCommandExecutionEvents(command, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
assertNotNull(command.getExecutionException());
assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(1);
}
    /**
     * Test a checked Exception thrown from run() when using observe(): the
     * failure must be delivered asynchronously via Observer#onError (wrapped
     * in a HystrixRuntimeException), not thrown from observe() itself.
     * (Previous Javadoc said "java.lang.Error" - copy-paste error; this
     * command throws a checked exception.)
     *
     * @throws InterruptedException if interrupted while awaiting the latch
     */
    @Test
    public void testCheckedExceptionViaObserve() throws InterruptedException {
        TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
        CommandWithCheckedException command = new CommandWithCheckedException(circuitBreaker);
        final AtomicReference<Throwable> t = new AtomicReference<Throwable>();
        final CountDownLatch latch = new CountDownLatch(1);
        try {
            command.observe().subscribe(new Observer<Boolean>() {
                @Override
                public void onCompleted() {
                    latch.countDown();
                }
                @Override
                public void onError(Throwable e) {
                    // capture the asynchronously-delivered failure for the assertions below
                    t.set(e);
                    latch.countDown();
                }
                @Override
                public void onNext(Boolean args) {
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
            fail("we should not get anything thrown, it should be emitted via the Observer#onError method");
        }
        latch.await(1, TimeUnit.SECONDS);
        assertNotNull(t.get());
        t.get().printStackTrace();
        assertTrue(t.get() instanceof HystrixRuntimeException);
        assertEquals("simulated checked exception message", t.get().getCause().getMessage());
        assertEquals("simulated checked exception message", command.getFailedExecutionException().getMessage());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isFailedExecution());
        assertCommandExecutionEvents(command, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
        assertNotNull(command.getExecutionException());
        assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
    /**
     * A semaphore-isolated command that times out must fail with a TIMEOUT
     * event and, with no fallback implemented, FALLBACK_MISSING.
     */
    @Test
    public void testSemaphoreExecutionWithTimeout() {
        TestHystrixCommand<?> cmd = new InterruptibleCommand(new TestCircuitBreaker(), false);
        System.out.println("Starting command");
        long timeMillis = System.currentTimeMillis();
        try {
            cmd.execute();
            fail("Should throw");
        } catch (Throwable t) {
            // all assertions live in the catch block: the execute() above must throw
            assertNotNull(cmd.getExecutionException());
            System.out.println("Unsuccessful Execution took : " + (System.currentTimeMillis() - timeMillis));
            assertCommandExecutionEvents(cmd, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
            assertEquals(0, cmd.metrics.getCurrentConcurrentExecutionCount());
            assertSaneHystrixRequestLog(1);
        }
    }
    /**
     * Test a recoverable java.lang.Error being thrown with no fallback: the
     * Error surfaces to the caller (double-wrapped, see inline note) and the
     * command records FAILURE plus FALLBACK_MISSING.
     */
    @Test
    public void testRecoverableErrorWithNoFallbackThrowsError() {
        TestHystrixCommand<?> command = getRecoverableErrorCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
        try {
            command.execute();
            fail("we expect to receive a " + Error.class.getSimpleName());
        } catch (Exception e) {
            // the actual error is an extra cause level deep because Hystrix needs to wrap Throwable/Error as it's public
            // methods only support Exception and it's not a strong enough reason to break backwards compatibility and jump to version 2.x
            // so HystrixRuntimeException -> wrapper Exception -> actual Error
            assertEquals("Execution ERROR for TestHystrixCommand", e.getCause().getCause().getMessage());
        }
        assertEquals("Execution ERROR for TestHystrixCommand", command.getFailedExecutionException().getCause().getMessage());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isFailedExecution());
        assertCommandExecutionEvents(command, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_MISSING);
        assertNotNull(command.getExecutionException());
        assertEquals(0, command.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
@Test
public void testRecoverableErrorMaskedByFallbackButLogged() {
TestHystrixCommand<?> command = getRecoverableErrorCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.FallbackResult.SUCCESS);
try {
assertEquals(FlexibleTestHystrixCommand.FALLBACK_VALUE, command.execute());
} catch (Exception e) {
fail("we expect to receive a valid fallback");
}
assertTrue(command.getExecutionTimeInMilliseconds() > -1);
assertTrue(command.isFailedExecution());
assertCommandExecutionEvents(command, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
assertNotNull(command.getExecutionException());
assertEquals(0, command.metrics.getCurrentConcurrentExecutionCount());
assertSaneHystrixRequestLog(1);
}
    /**
     * An unrecoverable java.lang.Error thrown with no fallback propagates to
     * the caller; only a FAILURE event is recorded (no FALLBACK_MISSING,
     * unlike the recoverable-error case).
     */
    @Test
    public void testUnrecoverableErrorThrownWithNoFallback() {
        TestHystrixCommand<?> command = getUnrecoverableErrorCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED);
        try {
            command.execute();
            fail("we expect to receive a " + Error.class.getSimpleName());
        } catch (Exception e) {
            // the actual error is an extra cause level deep because Hystrix needs to wrap Throwable/Error as it's public
            // methods only support Exception and it's not a strong enough reason to break backwards compatibility and jump to version 2.x
            // so HystrixRuntimeException -> wrapper Exception -> actual Error
            assertEquals("Unrecoverable Error for TestHystrixCommand", e.getCause().getCause().getMessage());
        }
        assertEquals("Unrecoverable Error for TestHystrixCommand", command.getFailedExecutionException().getCause().getMessage());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isFailedExecution());
        assertCommandExecutionEvents(command, HystrixEventType.FAILURE);
        assertNotNull(command.getExecutionException());
        assertEquals(0, command.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
    /**
     * An unrecoverable java.lang.Error propagates to the caller even when a
     * working fallback is configured: the fallback logic is never invoked.
     */
    @Test //even though fallback is implemented, that logic never fires, as this is an unrecoverable error and should be directly propagated to the caller
    public void testUnrecoverableErrorThrownWithFallback() {
        TestHystrixCommand<?> command = getUnrecoverableErrorCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.FallbackResult.SUCCESS);
        try {
            command.execute();
            fail("we expect to receive a " + Error.class.getSimpleName());
        } catch (Exception e) {
            // the actual error is an extra cause level deep because Hystrix needs to wrap Throwable/Error as it's public
            // methods only support Exception and it's not a strong enough reason to break backwards compatibility and jump to version 2.x
            // so HystrixRuntimeException -> wrapper Exception -> actual Error
            assertEquals("Unrecoverable Error for TestHystrixCommand", e.getCause().getCause().getMessage());
        }
        assertEquals("Unrecoverable Error for TestHystrixCommand", command.getFailedExecutionException().getCause().getMessage());
        assertTrue(command.getExecutionTimeInMilliseconds() > -1);
        assertTrue(command.isFailedExecution());
        assertCommandExecutionEvents(command, HystrixEventType.FAILURE);
        assertNotNull(command.getExecutionException());
        assertEquals(0, command.metrics.getCurrentConcurrentExecutionCount());
        assertSaneHystrixRequestLog(1);
    }
static class EventCommand extends HystrixCommand {
public EventCommand() {
super(Setter.withGroupKey(HystrixCommandGroupKey.Factory.asKey("eventGroup")).andCommandPropertiesDefaults(new HystrixCommandProperties.Setter().withFallbackIsolationSemaphoreMaxConcurrentRequests(3)));
}
@Override
protected String run() throws Exception {
System.out.println(Thread.currentThread().getName() + " : In run()");
throw new RuntimeException("run_exception");
}
@Override
public String getFallback() {
try {
System.out.println(Thread.currentThread().getName() + " : In fallback => " + getExecutionEvents());
Thread.sleep(30000L);
} catch (InterruptedException e) {
System.out.println(Thread.currentThread().getName() + " : Interruption occurred");
}
System.out.println(Thread.currentThread().getName() + " : CMD Success Result");
return "fallback";
}
}
@Test
public void testNonBlockingCommandQueueFiresTimeout() { //see https://github.com/Netflix/Hystrix/issues/514
    // 200ms execution latency with a 50ms timeout: the timeout must fire even though
    // nobody ever calls get() on the queued Future.
    final TestHystrixCommand<?> cmd = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.SUCCESS, 50);
    new Thread() {
        @Override
        public void run() {
            cmd.queue();
        }
    }.start();
    try {
        Thread.sleep(200);
        //timeout should occur in 50ms, and the underlying execution runs for 200ms,
        //so after sleeping 200ms the command should have finished with a fallback on timeout
    } catch (InterruptedException ie) {
        throw new RuntimeException(ie);
    }
    assertTrue(cmd.isExecutionComplete());
    assertTrue(cmd.isResponseTimedOut());
    assertEquals(0, cmd.metrics.getCurrentConcurrentExecutionCount());
}
@Override
protected void assertHooksOnSuccess(Func0<TestHystrixCommand<?>> ctor, Action1<TestHystrixCommand<?>> assertion) {
    // Run a fresh command instance through every invocation style, expecting
    // success in each case. The non-blocking queue variant never fails fast.
    final boolean expectSuccess = true;
    assertExecute(ctor.call(), assertion, expectSuccess);
    assertBlockingQueue(ctor.call(), assertion, expectSuccess);
    assertNonBlockingQueue(ctor.call(), assertion, expectSuccess, false);
    assertBlockingObserve(ctor.call(), assertion, expectSuccess);
    assertNonBlockingObserve(ctor.call(), assertion, expectSuccess);
}
@Override
protected void assertHooksOnFailure(Func0<TestHystrixCommand<?>> ctor, Action1<TestHystrixCommand<?>> assertion) {
    // Run a fresh command instance through every invocation style, expecting
    // failure in each case (queue() itself is not expected to throw).
    final boolean expectSuccess = false;
    assertExecute(ctor.call(), assertion, expectSuccess);
    assertBlockingQueue(ctor.call(), assertion, expectSuccess);
    assertNonBlockingQueue(ctor.call(), assertion, expectSuccess, false);
    assertBlockingObserve(ctor.call(), assertion, expectSuccess);
    assertNonBlockingObserve(ctor.call(), assertion, expectSuccess);
}
@Override
protected void assertHooksOnFailure(Func0<TestHystrixCommand<?>> ctor, Action1<TestHystrixCommand<?>> assertion, boolean failFast) {
    // Same as the two-arg overload, but the caller controls whether queue() itself
    // is expected to throw (failFast) in the non-blocking queue variant.
    assertExecute(ctor.call(), assertion, false);
    assertBlockingQueue(ctor.call(), assertion, false);
    assertNonBlockingQueue(ctor.call(), assertion, false, failFast);
    assertBlockingObserve(ctor.call(), assertion, false);
    assertNonBlockingObserve(ctor.call(), assertion, false);
}
/**
 * Run the command via {@link com.netflix.hystrix.HystrixCommand#execute()} and then assert.
 * @param command command to run
 * @param assertion assertions to check
 * @param isSuccess should the command succeed?
 */
private void assertExecute(TestHystrixCommand<?> command, Action1<TestHystrixCommand<?>> assertion, boolean isSuccess) {
    System.out.println(System.currentTimeMillis() + " : " + Thread.currentThread().getName() + " : Running command.execute() and then assertions...");
    if (isSuccess) {
        command.execute();
    } else {
        try {
            // result intentionally discarded (the unused local was removed);
            // we only care that execute() throws on the failure path
            command.execute();
            fail("Expected a command failure!");
        } catch (Exception ex) {
            System.out.println("Received expected ex : " + ex);
            ex.printStackTrace();
        }
    }
    assertion.call(command);
}
/**
 * Run the command via {@link com.netflix.hystrix.HystrixCommand#queue()}, immediately block, and then assert
 * @param command command to run
 * @param assertion assertions to check
 * @param isSuccess should the command succeed?
 */
private void assertBlockingQueue(TestHystrixCommand<?> command, Action1<TestHystrixCommand<?>> assertion, boolean isSuccess) {
    System.out.println("Running command.queue(), immediately blocking and then running assertions...");
    if (isSuccess) {
        try {
            command.queue().get();
        } catch (Exception e) {
            // a successful command must never throw; surface it as a test error
            throw new RuntimeException(e);
        }
    } else {
        try {
            command.queue().get();
            fail("Expected a command failure!");
        } catch (InterruptedException ie) {
            // interruption is not the failure under test; abort
            throw new RuntimeException(ie);
        } catch (ExecutionException ee) {
            // expected: Future.get() wraps the command failure in an ExecutionException
            System.out.println("Received expected ex : " + ee.getCause());
            ee.getCause().printStackTrace();
        } catch (Exception e) {
            // expected: failure thrown directly by queue() itself
            System.out.println("Received expected ex : " + e);
            e.printStackTrace();
        }
    }
    assertion.call(command);
}
/**
 * Run the command via {@link com.netflix.hystrix.HystrixCommand#queue()}, then poll for the command to be finished.
 * When it is finished, assert.
 * @param command command to run
 * @param assertion assertions to check
 * @param isSuccess should the command succeed?
 * @param failFast should the queue() call itself throw (instead of the returned Future)?
 */
private void assertNonBlockingQueue(TestHystrixCommand<?> command, Action1<TestHystrixCommand<?>> assertion, boolean isSuccess, boolean failFast) {
    System.out.println("Running command.queue(), sleeping the test thread until command is complete, and then running assertions...");
    Future<?> f = null;
    if (failFast) {
        try {
            f = command.queue();
            fail("Expected a failure when queuing the command");
        } catch (Exception ex) {
            System.out.println("Received expected fail fast ex : " + ex);
            ex.printStackTrace();
        }
    } else {
        try {
            f = command.queue();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }
    awaitCommandCompletion(command);
    assertion.call(command);
    if (f == null) {
        // Fail-fast path: queue() threw, so there is no Future to interrogate.
        // Previously f.get() would throw a NullPointerException here, which was
        // silently swallowed as the "expected" command failure (or wrapped into a
        // misleading RuntimeException on the success path).
        return;
    }
    if (isSuccess) {
        try {
            f.get();
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    } else {
        try {
            f.get();
            fail("Expected a command failure!");
        } catch (InterruptedException ie) {
            throw new RuntimeException(ie);
        } catch (ExecutionException ee) {
            // expected: Future.get() wraps the command failure
            System.out.println("Received expected ex : " + ee.getCause());
            ee.getCause().printStackTrace();
        } catch (Exception e) {
            System.out.println("Received expected ex : " + e);
            e.printStackTrace();
        }
    }
}
/**
 * Busy-waits (10ms poll) until the given command reports execution complete.
 * If the polling thread is interrupted, the interrupt status is restored and the
 * interruption is rethrown as a RuntimeException that preserves the original cause
 * (previously both the cause and the interrupt flag were discarded).
 */
private <T> void awaitCommandCompletion(TestHystrixCommand<T> command) {
    while (!command.isExecutionComplete()) {
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore interrupt status for callers
            throw new RuntimeException("interrupted", e);
        }
    }
}
/**
 * Test a command execution that fails but has a fallback.
 * <p>
 * Runs the same failing command twice: with fallback enabled the fallback value (false)
 * is returned; with fallback disabled the simulated failure propagates to the caller.
 */
@Test
public void testExecutionFailureWithFallbackImplementedButDisabled() {
    TestHystrixCommand<Boolean> commandEnabled = new KnownFailureTestCommandWithFallback(new TestCircuitBreaker(), true);
    try {
        assertEquals(false, commandEnabled.execute());
    } catch (Exception e) {
        e.printStackTrace();
        fail("We should have received a response from the fallback.");
    }
    TestHystrixCommand<Boolean> commandDisabled = new KnownFailureTestCommandWithFallback(new TestCircuitBreaker(), false);
    try {
        assertEquals(false, commandDisabled.execute());
        fail("expect exception thrown");
    } catch (Exception e) {
        // expected: fallback is disabled, so the simulated failure propagates
    }
    assertEquals("we failed with a simulated issue", commandDisabled.getFailedExecutionException().getMessage());
    assertTrue(commandDisabled.isFailedExecution());
    // enabled: FAILURE then FALLBACK_SUCCESS; disabled: FAILURE only
    assertCommandExecutionEvents(commandEnabled, HystrixEventType.FAILURE, HystrixEventType.FALLBACK_SUCCESS);
    assertCommandExecutionEvents(commandDisabled, HystrixEventType.FAILURE);
    assertNotNull(commandDisabled.getExecutionException());
    assertEquals(0, commandDisabled.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(2);
}
@Test
public void testExecutionTimeoutValue() {
    // Command sleeps for 3s with a 50ms timeout; the fallback must observe
    // isResponseTimedOut() == true and return the timeout-specific value.
    HystrixCommand.Setter properties = HystrixCommand.Setter
            .withGroupKey(HystrixCommandGroupKey.Factory.asKey("TestKey"))
            .andCommandPropertiesDefaults(HystrixCommandProperties.Setter()
                    .withExecutionTimeoutInMilliseconds(50));

    HystrixCommand<String> command = new HystrixCommand<String>(properties) {
        @Override
        protected String run() throws Exception {
            Thread.sleep(3000);
            // should never reach here
            return "hello";
        }

        @Override
        protected String getFallback() {
            // distinguish a timeout-triggered fallback from any other fallback cause
            if (isResponseTimedOut()) {
                return "timed-out";
            } else {
                return "abc";
            }
        }
    };

    String value = command.execute();
    assertTrue(command.isResponseTimedOut());
    assertEquals("expected fallback value", "timed-out", value);
}
/**
 * See https://github.com/Netflix/Hystrix/issues/212
 */
@Test
public void testObservableTimeoutNoFallbackThreadContext() {
    TestSubscriber<Object> ts = new TestSubscriber<Object>();
    // Captured inside the onError callback so we can assert which thread delivered
    // the error and whether the HystrixRequestContext was propagated onto it.
    final AtomicReference<Thread> onErrorThread = new AtomicReference<Thread>();
    final AtomicBoolean isRequestContextInitialized = new AtomicBoolean();
    // 200ms execution with a 50ms timeout and no fallback => guaranteed timeout.
    TestHystrixCommand<?> command = getCommand(ExecutionIsolationStrategy.THREAD, AbstractTestHystrixCommand.ExecutionResult.SUCCESS, 200, AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED, 50);
    command.toObservable().doOnError(new Action1<Throwable>() {
        @Override
        public void call(Throwable t1) {
            System.out.println("onError: " + t1);
            System.out.println("onError Thread: " + Thread.currentThread());
            System.out.println("ThreadContext in onError: " + HystrixRequestContext.isCurrentThreadInitialized());
            onErrorThread.set(Thread.currentThread());
            isRequestContextInitialized.set(HystrixRequestContext.isCurrentThreadInitialized());
        }
    }).subscribe(ts);
    ts.awaitTerminalEvent();
    // The timeout error must arrive on a HystrixTimer thread with the request context intact.
    assertTrue(isRequestContextInitialized.get());
    assertTrue(onErrorThread.get().getName().startsWith("HystrixTimer"));
    List<Throwable> errors = ts.getOnErrorEvents();
    assertEquals(1, errors.size());
    Throwable e = errors.get(0);
    if (errors.get(0) instanceof HystrixRuntimeException) {
        HystrixRuntimeException de = (HystrixRuntimeException) e;
        // no fallback implemented => fallback exception is UnsupportedOperationException,
        // and the root cause is the TimeoutException
        assertNotNull(de.getFallbackException());
        assertTrue(de.getFallbackException() instanceof UnsupportedOperationException);
        assertNotNull(de.getImplementingClass());
        assertNotNull(de.getCause());
        assertTrue(de.getCause() instanceof TimeoutException);
    } else {
        fail("the exception should be ExecutionException with cause as HystrixRuntimeException");
    }
    assertTrue(command.getExecutionTimeInMilliseconds() > -1);
    assertTrue(command.isResponseTimedOut());
    assertCommandExecutionEvents(command, HystrixEventType.TIMEOUT, HystrixEventType.FALLBACK_MISSING);
    assertNotNull(command.getExecutionException());
    assertEquals(0, command.getBuilder().metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
@Test
public void testExceptionConvertedToBadRequestExceptionInExecutionHookBypassesCircuitBreaker() {
    // An execution hook rewrites the command's exception into a HystrixBadRequestException,
    // which must reach the caller unchanged and be recorded as a BAD_REQUEST event.
    TestCircuitBreaker circuitBreaker = new TestCircuitBreaker();
    ExceptionToBadRequestByExecutionHookCommand command = new ExceptionToBadRequestByExecutionHookCommand(circuitBreaker, ExecutionIsolationStrategy.THREAD);
    try {
        command.execute();
        fail("we expect to receive a " + HystrixBadRequestException.class.getSimpleName());
    } catch (HystrixBadRequestException expected) {
        // this is the success path for this test
        expected.printStackTrace();
    } catch (Exception other) {
        other.printStackTrace();
        fail("We expect a " + HystrixBadRequestException.class.getSimpleName() + " but got a " + other.getClass().getSimpleName());
    }
    assertCommandExecutionEvents(command, HystrixEventType.BAD_REQUEST);
    assertEquals(0, circuitBreaker.metrics.getCurrentConcurrentExecutionCount());
    assertSaneHystrixRequestLog(1);
}
@Test
public void testInterruptFutureOnTimeout() throws InterruptedException, ExecutionException {
    // given: a command with interrupt-on-timeout enabled
    InterruptibleCommand cmd = new InterruptibleCommand(new TestCircuitBreaker(), true);
    // when: queued asynchronously (the returned Future was an unused local; removed)
    cmd.queue();
    // then: after the timeout window, the execution thread must have been interrupted
    Thread.sleep(500);
    assertTrue(cmd.hasBeenInterrupted());
}
@Test
public void testInterruptObserveOnTimeout() throws InterruptedException {
    // given: a command with interrupt-on-timeout enabled
    InterruptibleCommand command = new InterruptibleCommand(new TestCircuitBreaker(), true);

    // when: subscribed via the hot observe() API
    command.observe().subscribe();

    // then: after the timeout window, the execution thread observed the interrupt
    Thread.sleep(500);
    assertTrue(command.hasBeenInterrupted());
}
@Test
public void testInterruptToObservableOnTimeout() throws InterruptedException {
    // given: a command with interrupt-on-timeout enabled
    InterruptibleCommand command = new InterruptibleCommand(new TestCircuitBreaker(), true);

    // when: subscribed via the cold toObservable() API
    command.toObservable().subscribe();

    // then: after the timeout window, the execution thread observed the interrupt
    Thread.sleep(500);
    assertTrue(command.hasBeenInterrupted());
}
@Test
public void testDoNotInterruptFutureOnTimeoutIfPropertySaysNotTo() throws InterruptedException, ExecutionException {
    // given: a command with interrupt-on-timeout explicitly disabled
    InterruptibleCommand cmd = new InterruptibleCommand(new TestCircuitBreaker(), false);
    // when: queued asynchronously (the returned Future was an unused local; removed)
    cmd.queue();
    // then: even after the timeout window, the execution thread was never interrupted
    Thread.sleep(500);
    assertFalse(cmd.hasBeenInterrupted());
}
@Test
public void testDoNotInterruptObserveOnTimeoutIfPropertySaysNotTo() throws InterruptedException {
    // given: a command with interrupt-on-timeout explicitly disabled
    InterruptibleCommand command = new InterruptibleCommand(new TestCircuitBreaker(), false);

    // when: subscribed via the hot observe() API
    command.observe().subscribe();

    // then: even after the timeout window, the execution thread was never interrupted
    Thread.sleep(500);
    assertFalse(command.hasBeenInterrupted());
}
@Test
public void testDoNotInterruptToObservableOnTimeoutIfPropertySaysNotTo() throws InterruptedException {
    // given: a command with interrupt-on-timeout explicitly disabled
    InterruptibleCommand command = new InterruptibleCommand(new TestCircuitBreaker(), false);

    // when: subscribed via the cold toObservable() API
    command.toObservable().subscribe();

    // then: even after the timeout window, the execution thread was never interrupted
    Thread.sleep(500);
    assertFalse(command.hasBeenInterrupted());
}
@Test
public void testChainedCommand() {
    // The fallback of the primary command executes ANOTHER Hystrix command; the
    // sub-command's result (2) must propagate out through the fallback.
    class SubCommand extends TestHystrixCommand<Integer> {
        public SubCommand(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        }

        @Override
        protected Integer run() throws Exception {
            return 2;
        }
    }

    class PrimaryCommand extends TestHystrixCommand<Integer> {
        public PrimaryCommand(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        }

        @Override
        protected Integer run() throws Exception {
            throw new RuntimeException("primary failure");
        }

        @Override
        protected Integer getFallback() {
            // chain into a second command from within the fallback
            SubCommand subCmd = new SubCommand(new TestCircuitBreaker());
            return subCmd.execute();
        }
    }

    assertTrue(2 == new PrimaryCommand(new TestCircuitBreaker()).execute());
}
@Test
public void testSlowFallback() {
    // The fallback sleeps for 1.5s; it must be allowed to complete and return
    // its value (1) rather than being cut short (-1 would indicate interruption).
    class PrimaryCommand extends TestHystrixCommand<Integer> {
        public PrimaryCommand(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        }

        @Override
        protected Integer run() throws Exception {
            throw new RuntimeException("primary failure");
        }

        @Override
        protected Integer getFallback() {
            try {
                Thread.sleep(1500);
                return 1;
            } catch (InterruptedException ie) {
                // interruption would make the test fail via the -1 sentinel below
                System.out.println("Caught Interrupted Exception");
                ie.printStackTrace();
            }
            return -1;
        }
    }

    assertTrue(1 == new PrimaryCommand(new TestCircuitBreaker()).execute());
}
@Test
public void testOnRunStartHookThrowsSemaphoreIsolated() {
    // With SEMAPHORE isolation, a failure injected from onExecutionStart() must
    // prevent the user run() from executing, and the thread lifecycle hooks
    // (onThreadStart/onThreadComplete) must NOT fire.
    final AtomicBoolean exceptionEncountered = new AtomicBoolean(false);
    final AtomicBoolean onThreadStartInvoked = new AtomicBoolean(false);
    final AtomicBoolean onThreadCompleteInvoked = new AtomicBoolean(false);
    final AtomicBoolean executionAttempted = new AtomicBoolean(false);

    // Hook that fails every command before its run() method is reached.
    class FailureInjectionHook extends HystrixCommandExecutionHook {
        @Override
        public <T> void onExecutionStart(HystrixInvokable<T> commandInstance) {
            throw new HystrixRuntimeException(HystrixRuntimeException.FailureType.COMMAND_EXCEPTION, commandInstance.getClass(), "Injected Failure", null, null);
        }

        @Override
        public <T> void onThreadStart(HystrixInvokable<T> commandInstance) {
            onThreadStartInvoked.set(true);
            super.onThreadStart(commandInstance);
        }

        @Override
        public <T> void onThreadComplete(HystrixInvokable<T> commandInstance) {
            onThreadCompleteInvoked.set(true);
            super.onThreadComplete(commandInstance);
        }
    }

    final FailureInjectionHook failureInjectionHook = new FailureInjectionHook();

    class FailureInjectedCommand extends TestHystrixCommand<Integer> {
        public FailureInjectedCommand(ExecutionIsolationStrategy isolationStrategy) {
            super(testPropsBuilder().setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(isolationStrategy)), failureInjectionHook);
        }

        @Override
        protected Integer run() throws Exception {
            executionAttempted.set(true);
            return 3;
        }
    }

    TestHystrixCommand<Integer> semaphoreCmd = new FailureInjectedCommand(ExecutionIsolationStrategy.SEMAPHORE);
    try {
        int result = semaphoreCmd.execute();
        System.out.println("RESULT : " + result);
    } catch (Throwable ex) {
        ex.printStackTrace();
        exceptionEncountered.set(true);
    }
    assertTrue(exceptionEncountered.get());
    assertFalse(onThreadStartInvoked.get());    // no thread hooks under semaphore isolation
    assertFalse(onThreadCompleteInvoked.get());
    assertFalse(executionAttempted.get());      // run() never reached
}
@Test
public void testOnRunStartHookThrowsThreadIsolated() {
    // With THREAD isolation, a failure injected from onExecutionStart() must still
    // prevent the user run() from executing, but the thread lifecycle hooks
    // (onThreadStart/onThreadComplete) DO fire since a worker thread was used.
    final AtomicBoolean exceptionEncountered = new AtomicBoolean(false);
    final AtomicBoolean onThreadStartInvoked = new AtomicBoolean(false);
    final AtomicBoolean onThreadCompleteInvoked = new AtomicBoolean(false);
    final AtomicBoolean executionAttempted = new AtomicBoolean(false);

    // Hook that fails every command before its run() method is reached.
    class FailureInjectionHook extends HystrixCommandExecutionHook {
        @Override
        public <T> void onExecutionStart(HystrixInvokable<T> commandInstance) {
            throw new HystrixRuntimeException(HystrixRuntimeException.FailureType.COMMAND_EXCEPTION, commandInstance.getClass(), "Injected Failure", null, null);
        }

        @Override
        public <T> void onThreadStart(HystrixInvokable<T> commandInstance) {
            onThreadStartInvoked.set(true);
            super.onThreadStart(commandInstance);
        }

        @Override
        public <T> void onThreadComplete(HystrixInvokable<T> commandInstance) {
            onThreadCompleteInvoked.set(true);
            super.onThreadComplete(commandInstance);
        }
    }

    final FailureInjectionHook failureInjectionHook = new FailureInjectionHook();

    class FailureInjectedCommand extends TestHystrixCommand<Integer> {
        public FailureInjectedCommand(ExecutionIsolationStrategy isolationStrategy) {
            super(testPropsBuilder(new TestCircuitBreaker()).setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(isolationStrategy)), failureInjectionHook);
        }

        @Override
        protected Integer run() throws Exception {
            executionAttempted.set(true);
            return 3;
        }
    }

    TestHystrixCommand<Integer> threadCmd = new FailureInjectedCommand(ExecutionIsolationStrategy.THREAD);
    try {
        int result = threadCmd.execute();
        System.out.println("RESULT : " + result);
    } catch (Throwable ex) {
        ex.printStackTrace();
        exceptionEncountered.set(true);
    }
    assertTrue(exceptionEncountered.get());
    assertTrue(onThreadStartInvoked.get());     // thread hooks fire under thread isolation
    assertTrue(onThreadCompleteInvoked.get());
    assertFalse(executionAttempted.get());      // run() never reached
}
/* ******************************************************************************** */
/* ******************************************************************************** */
/* private HystrixCommand class implementations for unit testing */
/* ******************************************************************************** */
/* ******************************************************************************** */
@Override
TestHystrixCommand<?> getCommand(ExecutionIsolationStrategy isolationStrategy, AbstractTestHystrixCommand.ExecutionResult executionResult, int executionLatency, AbstractTestHystrixCommand.FallbackResult fallbackResult, int fallbackLatency, TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, AbstractTestHystrixCommand.CacheEnabled cacheEnabled, Object value, TryableSemaphore executionSemaphore, TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
    // Factory hook for the shared test superclass: delegates to the fully-configurable
    // flexible command factory below.
    return FlexibleTestHystrixCommand.from(isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
}
/**
 * Static factory holder for the flexible test commands. A fallback result of
 * UNIMPLEMENTED yields a command with no fallback override; any other value yields a
 * command whose fallback behavior/latency follows the given parameters.
 * <p>
 * Fixes: the two well-known result values are now {@code final} (they were mutable
 * public statics), and a private constructor prevents instantiating this pure holder.
 */
private static class FlexibleTestHystrixCommand {

    public static final int EXECUTE_VALUE = 1;
    public static final int FALLBACK_VALUE = 11;

    private FlexibleTestHystrixCommand() {
        // static factory holder — not instantiable
    }

    public static AbstractFlexibleTestHystrixCommand from(ExecutionIsolationStrategy isolationStrategy, AbstractTestHystrixCommand.ExecutionResult executionResult, int executionLatency, AbstractTestHystrixCommand.FallbackResult fallbackResult, int fallbackLatency, TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, AbstractTestHystrixCommand.CacheEnabled cacheEnabled, Object value, TryableSemaphore executionSemaphore, TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
        if (fallbackResult.equals(AbstractTestHystrixCommand.FallbackResult.UNIMPLEMENTED)) {
            return new FlexibleTestHystrixCommandNoFallback(isolationStrategy, executionResult, executionLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
        } else {
            return new FlexibleTestHystrixCommandWithFallback(isolationStrategy, executionResult, executionLatency, fallbackResult, fallbackLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
        }
    }
}
/**
 * Base class for the "flexible" test commands: isolation strategy, execution outcome,
 * latency, timeout, caching, semaphores and circuit-breaker state are all driven by
 * constructor arguments.
 */
private static class AbstractFlexibleTestHystrixCommand extends TestHystrixCommand<Integer> {

    protected final AbstractTestHystrixCommand.ExecutionResult executionResult; // outcome run() should produce
    protected final int executionLatency;                                       // ms to sleep inside run()
    protected final CacheEnabled cacheEnabled;                                  // whether getCacheKey() returns a key
    protected final Object value;                                               // used as the cache key when caching is on

    AbstractFlexibleTestHystrixCommand(ExecutionIsolationStrategy isolationStrategy, AbstractTestHystrixCommand.ExecutionResult executionResult, int executionLatency, TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, TryableSemaphore executionSemaphore, TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
        super(testPropsBuilder(circuitBreaker)
                .setCircuitBreaker(circuitBreaker)
                .setMetrics(circuitBreaker.metrics)
                .setThreadPool(threadPool)
                .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter()
                        .withExecutionIsolationStrategy(isolationStrategy)
                        .withExecutionTimeoutInMilliseconds(timeout)
                        .withCircuitBreakerEnabled(!circuitBreakerDisabled))
                .setExecutionSemaphore(executionSemaphore)
                .setFallbackSemaphore(fallbackSemaphore));
        this.executionResult = executionResult;
        this.executionLatency = executionLatency;
        this.cacheEnabled = cacheEnabled;
        this.value = value;
    }

    @Override
    protected Integer run() throws Exception {
        // Optionally delay, then produce the configured outcome.
        System.out.println(System.currentTimeMillis() + " : " + Thread.currentThread().getName() + " starting the run() method");
        addLatency(executionLatency);
        if (executionResult == AbstractTestHystrixCommand.ExecutionResult.SUCCESS) {
            return FlexibleTestHystrixCommand.EXECUTE_VALUE;
        } else if (executionResult == AbstractTestHystrixCommand.ExecutionResult.FAILURE) {
            throw new RuntimeException("Execution Failure for TestHystrixCommand");
        } else if (executionResult == AbstractTestHystrixCommand.ExecutionResult.HYSTRIX_FAILURE) {
            throw new HystrixRuntimeException(HystrixRuntimeException.FailureType.COMMAND_EXCEPTION, AbstractFlexibleTestHystrixCommand.class, "Execution Hystrix Failure for TestHystrixCommand", new RuntimeException("Execution Failure for TestHystrixCommand"), new RuntimeException("Fallback Failure for TestHystrixCommand"));
        } else if (executionResult == AbstractTestHystrixCommand.ExecutionResult.RECOVERABLE_ERROR) {
            // a plain java.lang.Error — per the enum name, the "recoverable" variant
            throw new java.lang.Error("Execution ERROR for TestHystrixCommand");
        } else if (executionResult == AbstractTestHystrixCommand.ExecutionResult.UNRECOVERABLE_ERROR) {
            // StackOverflowError is the "unrecoverable" variant used by the tests above
            throw new StackOverflowError("Unrecoverable Error for TestHystrixCommand");
        } else if (executionResult == AbstractTestHystrixCommand.ExecutionResult.BAD_REQUEST) {
            throw new HystrixBadRequestException("Execution BadRequestException for TestHystrixCommand");
        } else {
            throw new RuntimeException("You passed in a executionResult enum that can't be represented in HystrixCommand: " + executionResult);
        }
    }

    @Override
    public String getCacheKey() {
        if (cacheEnabled == CacheEnabled.YES)
            return value.toString();
        else
            return null;
    }

    /**
     * Sleeps for the given latency; if interrupted (e.g. by a timeout), sleeps again
     * to simulate a dependency that does not obey interrupts.
     */
    protected void addLatency(int latency) {
        if (latency > 0) {
            try {
                System.out.println(System.currentTimeMillis() + " : " + Thread.currentThread().getName() + " About to sleep for : " + latency);
                Thread.sleep(latency);
                System.out.println(System.currentTimeMillis() + " : " + Thread.currentThread().getName() + " Woke up from sleep!");
            } catch (InterruptedException e) {
                e.printStackTrace();
                // ignore and sleep some more to simulate a dependency that doesn't obey interrupts
                try {
                    Thread.sleep(latency);
                } catch (Exception e2) {
                    // ignore
                }
                System.out.println("after interruption with extra sleep");
            }
        }
    }
}
/**
 * Flexible test command variant whose fallback behavior and latency are configurable.
 */
private static class FlexibleTestHystrixCommandWithFallback extends AbstractFlexibleTestHystrixCommand {

    protected final AbstractTestHystrixCommand.FallbackResult fallbackResult; // outcome getFallback() should produce
    protected final int fallbackLatency;                                      // ms to sleep inside getFallback()

    FlexibleTestHystrixCommandWithFallback(ExecutionIsolationStrategy isolationStrategy, AbstractTestHystrixCommand.ExecutionResult executionResult, int executionLatency, FallbackResult fallbackResult, int fallbackLatency, TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, TryableSemaphore executionSemaphore, TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
        super(isolationStrategy, executionResult, executionLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
        this.fallbackResult = fallbackResult;
        this.fallbackLatency = fallbackLatency;
    }

    @Override
    protected Integer getFallback() {
        addLatency(fallbackLatency);
        switch (fallbackResult) {
            case SUCCESS:
                return FlexibleTestHystrixCommand.FALLBACK_VALUE;
            case FAILURE:
                throw new RuntimeException("Fallback Failure for TestHystrixCommand");
            case UNIMPLEMENTED:
                return super.getFallback();
            default:
                throw new RuntimeException("You passed in a fallbackResult enum that can't be represented in HystrixCommand: " + fallbackResult);
        }
    }
}
/**
 * Flexible test command variant with NO fallback override — used when
 * FallbackResult.UNIMPLEMENTED is requested so the base-class fallback behavior applies.
 */
private static class FlexibleTestHystrixCommandNoFallback extends AbstractFlexibleTestHystrixCommand {
    FlexibleTestHystrixCommandNoFallback(ExecutionIsolationStrategy isolationStrategy, AbstractTestHystrixCommand.ExecutionResult executionResult, int executionLatency, TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int timeout, CacheEnabled cacheEnabled, Object value, TryableSemaphore executionSemaphore, TryableSemaphore fallbackSemaphore, boolean circuitBreakerDisabled) {
        super(isolationStrategy, executionResult, executionLatency, circuitBreaker, threadPool, timeout, cacheEnabled, value, executionSemaphore, fallbackSemaphore, circuitBreakerDisabled);
    }
}
/**
 * Successful execution - no fallback implementation.
 */
private static class SuccessfulTestCommand extends TestHystrixCommand<Boolean> {

    public SuccessfulTestCommand() {
        // default to the shared unit-test property setter
        this(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter());
    }

    public SuccessfulTestCommand(HystrixCommandProperties.Setter properties) {
        super(testPropsBuilder().setCommandPropertiesDefaults(properties));
    }

    @Override
    protected Boolean run() {
        return Boolean.TRUE;
    }
}
/**
 * Successful execution - no fallback implementation.
 * The command group (owner) is supplied dynamically per instance.
 */
private static class DynamicOwnerTestCommand extends TestHystrixCommand<Boolean> {

    public DynamicOwnerTestCommand(HystrixCommandGroupKey owner) {
        super(testPropsBuilder().setOwner(owner));
    }

    @Override
    protected Boolean run() {
        System.out.println("successfully executed");
        return true;
    }
}
/**
 * Successful execution - no fallback implementation.
 * Both the command group (owner) and the command key are supplied dynamically.
 */
private static class DynamicOwnerAndKeyTestCommand extends TestHystrixCommand<Boolean> {

    public DynamicOwnerAndKeyTestCommand(HystrixCommandGroupKey owner, HystrixCommandKey key) {
        super(testPropsBuilder().setOwner(owner).setCommandKey(key).setCircuitBreaker(null).setMetrics(null));
        // we specifically are NOT passing in a circuit breaker here so we test that it creates a new one correctly based on the dynamic key
    }

    @Override
    protected Boolean run() {
        System.out.println("successfully executed");
        return true;
    }
}
/**
 * Failed execution with known exception (HystrixException) - no fallback implementation.
 */
private static class KnownFailureTestCommandWithoutFallback extends TestHystrixCommand<Boolean> {

    private KnownFailureTestCommandWithoutFallback(TestCircuitBreaker circuitBreaker) {
        super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
    }

    @Override
    protected Boolean run() {
        // Always throws; with no fallback defined the failure propagates to the caller.
        System.out.println("*** simulated failed execution *** ==> " + Thread.currentThread());
        throw new RuntimeException("we failed with a simulated issue");
    }
}
/**
 * Failed execution - fallback implementation successfully returns value.
 * The two-arg constructor additionally allows the fallback to be disabled via properties.
 */
private static class KnownFailureTestCommandWithFallback extends TestHystrixCommand<Boolean> {

    public KnownFailureTestCommandWithFallback(TestCircuitBreaker circuitBreaker) {
        super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
    }

    public KnownFailureTestCommandWithFallback(TestCircuitBreaker circuitBreaker, boolean fallbackEnabled) {
        super(testPropsBuilder()
                .setCircuitBreaker(circuitBreaker)
                .setMetrics(circuitBreaker.metrics)
                .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withFallbackEnabled(fallbackEnabled)));
    }

    @Override
    protected Boolean run() {
        // always fail so the fallback path is exercised
        System.out.println("*** simulated failed execution ***");
        throw new RuntimeException("we failed with a simulated issue");
    }

    @Override
    protected Boolean getFallback() {
        return Boolean.FALSE;
    }
}
/**
 * A Command implementation that supports caching.
 */
private static class SuccessfulCacheableCommand<T> extends TestHystrixCommand<T> {

    private final boolean cacheEnabled;         // toggles getCacheKey() between a real key and null
    private volatile boolean executed = false;  // lets tests distinguish real executions from cache hits
    private final T value;

    public SuccessfulCacheableCommand(TestCircuitBreaker circuitBreaker, boolean cacheEnabled, T value) {
        super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        this.value = value;
        this.cacheEnabled = cacheEnabled;
    }

    @Override
    protected T run() {
        executed = true;
        System.out.println("successfully executed");
        return value;
    }

    public boolean isCommandRunningInThread() {
        return super.getProperties().executionIsolationStrategy().get().equals(ExecutionIsolationStrategy.THREAD);
    }

    @Override
    public String getCacheKey() {
        // commands producing the same value share a cache entry when caching is on
        return cacheEnabled ? value.toString() : null;
    }
}
/**
 * A Command implementation that supports caching, isolated via SEMAPHORE.
 */
private static class SuccessfulCacheableCommandViaSemaphore extends TestHystrixCommand<String> {

    private final boolean cacheEnabled;         // toggles getCacheKey() between a real key and null
    private volatile boolean executed = false;  // lets tests distinguish real executions from cache hits
    private final String value;

    public SuccessfulCacheableCommandViaSemaphore(TestCircuitBreaker circuitBreaker, boolean cacheEnabled, String value) {
        super(testPropsBuilder()
                .setCircuitBreaker(circuitBreaker)
                .setMetrics(circuitBreaker.metrics)
                .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE)));
        this.value = value;
        this.cacheEnabled = cacheEnabled;
    }

    @Override
    protected String run() {
        executed = true;
        System.out.println("successfully executed");
        return value;
    }

    public boolean isCommandRunningInThread() {
        return super.getProperties().executionIsolationStrategy().get().equals(ExecutionIsolationStrategy.THREAD);
    }

    @Override
    public String getCacheKey() {
        // the value itself serves as the cache key when caching is on
        return cacheEnabled ? value : null;
    }
}
/**
 * A Command implementation that supports caching and execution takes a while.
 * <p>
 * Used to test scenario where Futures are returned with a backing call still executing.
 */
private static class SlowCacheableCommand extends TestHystrixCommand<String> {

    private final String value;
    private final int duration;                 // ms the backing call takes
    private volatile boolean executed = false;  // lets tests distinguish real executions from cache hits

    public SlowCacheableCommand(TestCircuitBreaker circuitBreaker, String value, int duration) {
        super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        this.value = value;
        this.duration = duration;
    }

    @Override
    protected String run() {
        executed = true;
        // simulate a slow backing call before producing the cacheable value
        try {
            Thread.sleep(duration);
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("successfully executed");
        return value;
    }

    @Override
    public String getCacheKey() {
        return value;
    }
}
/**
 * This has a ThreadPool that has a single thread and queueSize of 1.
 */
private static class TestCommandRejection extends TestHystrixCommand<Boolean> {

    private final static int FALLBACK_NOT_IMPLEMENTED = 1;
    private final static int FALLBACK_SUCCESS = 2;
    private final static int FALLBACK_FAILURE = 3;

    private final int fallbackBehavior;  // one of the FALLBACK_* constants above
    private final int sleepTime;         // ms run() blocks for

    private TestCommandRejection(TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int sleepTime, int timeout, int fallbackBehavior) {
        super(testPropsBuilder()
                .setThreadPool(threadPool)
                .setCircuitBreaker(circuitBreaker)
                .setMetrics(circuitBreaker.metrics)
                .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(timeout)));
        this.fallbackBehavior = fallbackBehavior;
        this.sleepTime = sleepTime;
    }

    @Override
    protected Boolean run() {
        System.out.println(">>> TestCommandRejection running");
        try {
            Thread.sleep(sleepTime);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        return true;
    }

    @Override
    protected Boolean getFallback() {
        switch (fallbackBehavior) {
            case FALLBACK_SUCCESS:
                return false;
            case FALLBACK_FAILURE:
                throw new RuntimeException("failed on fallback");
            default:
                // FALLBACK_NOT_IMPLEMENTED: defer to the base-class fallback behavior
                return super.getFallback();
        }
    }
}
    /**
     * Command that receives a custom thread-pool, sleepTime, timeout
     */
    private static class CommandWithCustomThreadPool extends TestHystrixCommand<Boolean> {
        // Set to true when run() actually starts; lets tests detect thread-pool rejection.
        public boolean didExecute = false;
        // Milliseconds run() sleeps, keeping the pool thread busy.
        private final int sleepTime;
        private CommandWithCustomThreadPool(TestCircuitBreaker circuitBreaker, HystrixThreadPool threadPool, int sleepTime, HystrixCommandProperties.Setter properties) {
            super(testPropsBuilder().setThreadPool(threadPool).setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics).setCommandPropertiesDefaults(properties));
            this.sleepTime = sleepTime;
        }
        /** Records that execution happened, sleeps for sleepTime ms, then succeeds with true. */
        @Override
        protected Boolean run() {
            System.out.println("**** Executing CommandWithCustomThreadPool. Execution => " + sleepTime);
            didExecute = true;
            try {
                Thread.sleep(sleepTime);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return true;
        }
    }
    /**
     * The run() will fail and getFallback() take a long time.
     */
    private static class TestSemaphoreCommandWithSlowFallback extends TestHystrixCommand<Boolean> {
        // Milliseconds the fallback sleeps while holding a fallback-semaphore permit.
        private final long fallbackSleep;
        private TestSemaphoreCommandWithSlowFallback(TestCircuitBreaker circuitBreaker, int fallbackSemaphoreExecutionCount, long fallbackSleep) {
            // Limits concurrent fallbacks via the fallback semaphore and disables interrupt-on-timeout
            // so the sleeping fallback is not cut short.
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withFallbackIsolationSemaphoreMaxConcurrentRequests(fallbackSemaphoreExecutionCount).withExecutionIsolationThreadInterruptOnTimeout(false)));
            this.fallbackSleep = fallbackSleep;
        }
        /** Always fails so that the fallback path is exercised. */
        @Override
        protected Boolean run() {
            throw new RuntimeException("run fails");
        }
        /** Sleeps for fallbackSleep ms (interrupt logged and swallowed), then succeeds with true. */
        @Override
        protected Boolean getFallback() {
            try {
                Thread.sleep(fallbackSleep);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return true;
        }
    }
    /**
     * Command that always times out (500 ms sleep vs 200 ms timeout), has no fallback and has
     * request caching disabled (null cache key).
     */
    private static class NoRequestCacheTimeoutWithoutFallback extends TestHystrixCommand<Boolean> {
        public NoRequestCacheTimeoutWithoutFallback(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(200).withCircuitBreakerEnabled(false)))
;
            // we want it to timeout
        }
        /** Sleeps past the configured timeout; the interrupt from the timeout is only logged. */
        @Override
        protected Boolean run() {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                System.out.println(">>>> Sleep Interrupted: " + e.getMessage());
                // e.printStackTrace();
            }
            return true;
        }
        /** Returning null disables request caching for this command. */
        @Override
        public String getCacheKey() {
            return null;
        }
    }
/**
* The run() will take time. Configurable fallback implementation.
*/
private static class TestSemaphoreCommand extends TestHystrixCommand<Boolean> {
private final long executionSleep;
private final static int RESULT_SUCCESS = 1;
private final static int RESULT_FAILURE = 2;
private final static int RESULT_BAD_REQUEST_EXCEPTION = 3;
private final int resultBehavior;
private final static int FALLBACK_SUCCESS = 10;
private final static int FALLBACK_NOT_IMPLEMENTED = 11;
private final static int FALLBACK_FAILURE = 12;
private final int fallbackBehavior;
private TestSemaphoreCommand(TestCircuitBreaker circuitBreaker, int executionSemaphoreCount, long executionSleep, int resultBehavior, int fallbackBehavior) {
super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
.setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter()
.withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE)
.withExecutionIsolationSemaphoreMaxConcurrentRequests(executionSemaphoreCount)));
this.executionSleep = executionSleep;
this.resultBehavior = resultBehavior;
this.fallbackBehavior = fallbackBehavior;
}
private TestSemaphoreCommand(TestCircuitBreaker circuitBreaker, TryableSemaphore semaphore, long executionSleep, int resultBehavior, int fallbackBehavior) {
super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
.setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter()
.withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE))
.setExecutionSemaphore(semaphore));
this.executionSleep = executionSleep;
this.resultBehavior = resultBehavior;
this.fallbackBehavior = fallbackBehavior;
}
@Override
protected Boolean run() {
try {
Thread.sleep(executionSleep);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (resultBehavior == RESULT_SUCCESS) {
return true;
} else if (resultBehavior == RESULT_FAILURE) {
throw new RuntimeException("TestSemaphoreCommand failure");
} else if (resultBehavior == RESULT_BAD_REQUEST_EXCEPTION) {
throw new HystrixBadRequestException("TestSemaphoreCommand BadRequestException");
} else {
throw new IllegalStateException("Didn't use a proper enum for result behavior");
}
}
@Override
protected Boolean getFallback() {
if (fallbackBehavior == FALLBACK_SUCCESS) {
return false;
} else if (fallbackBehavior == FALLBACK_FAILURE) {
throw new RuntimeException("fallback failure");
} else { //FALLBACK_NOT_IMPLEMENTED
return super.getFallback();
}
}
}
    /**
     * Semaphore based command that allows caller to use latches to know when it has started and signal when it
     * would like the command to finish
     */
    private static class LatchedSemaphoreCommand extends TestHystrixCommand<Boolean> {
        // startLatch: counted down by run() on entry; waitLatch: awaited by run() until the test releases it.
        private final CountDownLatch startLatch, waitLatch;
        /**
         *
         * @param circuitBreaker circuit breaker (passed in so it may be shared)
         * @param semaphore semaphore (passed in so it may be shared)
         * @param startLatch
         *            this command calls {@link java.util.concurrent.CountDownLatch#countDown()} immediately
         *            upon running
         * @param waitLatch
         *            this command calls {@link java.util.concurrent.CountDownLatch#await()} once it starts
         *            to run. The caller can use the latch to signal the command to finish
         */
        private LatchedSemaphoreCommand(TestCircuitBreaker circuitBreaker, TryableSemaphore semaphore,
                CountDownLatch startLatch, CountDownLatch waitLatch) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE).withCircuitBreakerEnabled(false))
                    .setExecutionSemaphore(semaphore));
            this.startLatch = startLatch;
            this.waitLatch = waitLatch;
        }
        /**
         * Signals the caller that execution started, then blocks until the caller releases
         * waitLatch. Returns true on normal completion, false if the wait was interrupted.
         */
        @Override
        protected Boolean run() {
            // signals caller that run has started
            this.startLatch.countDown();
            try {
                // waits for caller to countDown latch
                this.waitLatch.await();
            } catch (InterruptedException e) {
                e.printStackTrace();
                return false;
            }
            return true;
        }
    }
    /**
     * The run() will take time. Contains fallback.
     */
    private static class TestSemaphoreCommandWithFallback extends TestHystrixCommand<Boolean> {
        // Milliseconds run() sleeps before succeeding.
        private final long executionSleep;
        // Fixed value returned by getFallback() when the semaphore rejects or run() times out.
        private final Boolean fallback;
        private TestSemaphoreCommandWithFallback(TestCircuitBreaker circuitBreaker, int executionSemaphoreCount, long executionSleep, Boolean fallback) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(ExecutionIsolationStrategy.SEMAPHORE).withExecutionIsolationSemaphoreMaxConcurrentRequests(executionSemaphoreCount)));
            this.executionSleep = executionSleep;
            this.fallback = fallback;
        }
        /** Sleeps for executionSleep ms (interrupt logged and swallowed), then succeeds with true. */
        @Override
        protected Boolean run() {
            try {
                Thread.sleep(executionSleep);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return true;
        }
        /** Returns the fixed fallback value supplied at construction. */
        @Override
        protected Boolean getFallback() {
            return fallback;
        }
    }
    /**
     * Command that always times out (500 ms sleep vs 200 ms timeout) with request caching
     * enabled under the shared key "A" and a fallback that returns false.
     */
    private static class RequestCacheNullPointerExceptionCase extends TestHystrixCommand<Boolean> {
        public RequestCacheNullPointerExceptionCase(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(200)));
            // we want it to timeout
        }
        /** Sleeps past the configured timeout; the interrupt from the timeout is only logged. */
        @Override
        protected Boolean run() {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return true;
        }
        @Override
        protected Boolean getFallback() {
            return false;
        }
        /** Every instance shares the key "A", so later executions hit the request cache. */
        @Override
        public String getCacheKey() {
            return "A";
        }
    }
    /**
     * Command that always times out (500 ms sleep vs 200 ms timeout), has no fallback, and has
     * request caching enabled under the shared key "A".
     */
    private static class RequestCacheTimeoutWithoutFallback extends TestHystrixCommand<Boolean> {
        public RequestCacheTimeoutWithoutFallback(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder().setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionTimeoutInMilliseconds(200).withCircuitBreakerEnabled(false)));
            // we want it to timeout
        }
        /** Sleeps past the configured timeout; the interrupt from the timeout is only logged. */
        @Override
        protected Boolean run() {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                System.out.println(">>>> Sleep Interrupted: " + e.getMessage());
                // e.printStackTrace();
            }
            return true;
        }
        /** Every instance shares the key "A", so later executions hit the request cache. */
        @Override
        public String getCacheKey() {
            return "A";
        }
    }
private static class RequestCacheThreadRejectionWithoutFallback extends TestHystrixCommand<Boolean> {
final CountDownLatch completionLatch;
public RequestCacheThreadRejectionWithoutFallback(TestCircuitBreaker circuitBreaker, CountDownLatch completionLatch) {
super(testPropsBuilder()
.setCircuitBreaker(circuitBreaker)
.setMetrics(circuitBreaker.metrics)
.setThreadPool(new HystrixThreadPool() {
@Override
public ThreadPoolExecutor getExecutor() {
return null;
}
@Override
public void markThreadExecution() {
}
@Override
public void markThreadCompletion() {
}
@Override
public void markThreadRejection() {
}
@Override
public boolean isQueueSpaceAvailable() {
// always return false so we reject everything
return false;
}
@Override
public Scheduler getScheduler() {
return new HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this);
}
@Override
public Scheduler getScheduler(Func0<Boolean> shouldInterruptThread) {
return new HystrixContextScheduler(HystrixPlugins.getInstance().getConcurrencyStrategy(), this, shouldInterruptThread);
}
}));
this.completionLatch = completionLatch;
}
@Override
protected Boolean run() {
try {
if (completionLatch.await(1000, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("timed out waiting on completionLatch");
}
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return true;
}
@Override
public String getCacheKey() {
return "A";
}
}
    /**
     * Command whose run() always throws HystrixBadRequestException, with request caching enabled
     * under the key "one" and a fallback returning false.
     * NOTE(review): in Hystrix a bad-request exception is not expected to trigger the fallback —
     * confirm against the assertions in the tests using this command.
     */
    private static class BadRequestCommand extends TestHystrixCommand<Boolean> {
        public BadRequestCommand(TestCircuitBreaker circuitBreaker, ExecutionIsolationStrategy isolationType) {
            super(testPropsBuilder()
                    .setCircuitBreaker(circuitBreaker)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(isolationType))
                    .setMetrics(circuitBreaker.metrics));
        }
        @Override
        protected Boolean run() {
            throw new HystrixBadRequestException("Message to developer that they passed in bad data or something like that.");
        }
        @Override
        protected Boolean getFallback() {
            return false;
        }
        @Override
        protected String getCacheKey() {
            return "one";
        }
    }
private static class BusinessException extends Exception {
public BusinessException(String msg) {
super(msg);
}
}
    /**
     * Command whose run() throws a checked BusinessException which an execution hook converts
     * into a HystrixBadRequestException. Request caching is enabled under the key "nein".
     */
    private static class ExceptionToBadRequestByExecutionHookCommand extends TestHystrixCommand<Boolean> {
        public ExceptionToBadRequestByExecutionHookCommand(TestCircuitBreaker circuitBreaker, ExecutionIsolationStrategy isolationType) {
            super(testPropsBuilder()
                    .setCircuitBreaker(circuitBreaker)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter().withExecutionIsolationStrategy(isolationType))
                    .setMetrics(circuitBreaker.metrics)
                    .setExecutionHook(new TestableExecutionHook(){
                        // Wraps any execution error into HystrixBadRequestException, preserving
                        // the original exception as the cause.
                        @Override
                        public <T> Exception onRunError(HystrixInvokable<T> commandInstance, Exception e) {
                            super.onRunError(commandInstance, e);
                            return new HystrixBadRequestException("autoconverted exception", e);
                        }
                    }));
        }
        /** Always fails with a checked exception for the hook to convert. */
        @Override
        protected Boolean run() throws BusinessException {
            throw new BusinessException("invalid input by the user");
        }
        @Override
        protected String getCacheKey() {
            return "nein";
        }
    }
    /** Command whose run() always throws a checked IOException. */
    private static class CommandWithCheckedException extends TestHystrixCommand<Boolean> {
        public CommandWithCheckedException(TestCircuitBreaker circuitBreaker) {
            super(testPropsBuilder()
                    .setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics));
        }
        @Override
        protected Boolean run() throws Exception {
            throw new IOException("simulated checked exception message");
        }
    }
    /**
     * Command with a 100 ms timeout whose run() sleeps for 2000 ms, used to observe whether the
     * executing thread is interrupted on timeout depending on {@code shouldInterrupt}.
     */
    private static class InterruptibleCommand extends TestHystrixCommand<Boolean> {
        public InterruptibleCommand(TestCircuitBreaker circuitBreaker, boolean shouldInterrupt) {
            super(testPropsBuilder()
                    .setCircuitBreaker(circuitBreaker).setMetrics(circuitBreaker.metrics)
                    .setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter()
                            .withExecutionIsolationThreadInterruptOnTimeout(shouldInterrupt)
                            .withExecutionTimeoutInMilliseconds(100)));
        }
        // Records whether the sleeping run() was interrupted; volatile for cross-thread visibility.
        private volatile boolean hasBeenInterrupted;
        public boolean hasBeenInterrupted() {
            return hasBeenInterrupted;
        }
        /** Sleeps far past the timeout; returns true iff the sleep was interrupted. */
        @Override
        protected Boolean run() throws Exception {
            try {
                Thread.sleep(2000);
            }
            catch (InterruptedException e) {
                System.out.println("Interrupted!");
                e.printStackTrace();
                hasBeenInterrupted = true;
            }
            return hasBeenInterrupted;
        }
    }
private static class CommandWithDisabledTimeout extends TestHystrixCommand<Boolean> {
private final int latency;
public CommandWithDisabledTimeout(int timeout, int latency) {
super(testPropsBuilder().setCommandPropertiesDefaults(HystrixCommandPropertiesTest.getUnitTestPropertiesSetter()
.withExecutionTimeoutInMilliseconds(timeout)
.withExecutionTimeoutEnabled(false)));
this.latency = latency;
}
@Override
protected Boolean run() throws Exception {
try {
Thread.sleep(latency);
return true;
} catch (InterruptedException ex) {
return false;
}
}
@Override
protected Boolean getFallback() {
return false;
}
}
}
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/CreateGroupResponse.java | 768 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "")
@XmlRootElement(name = "CreateGroupResponse")
public class CreateGroupResponse {
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-polly/src/main/java/com/amazonaws/services/polly/model/InvalidLexiconException.java | 1284 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.polly.model;
import javax.annotation.Generated;
/**
* <p>
* Amazon Polly can't find the specified lexicon. Verify that the lexicon's name is spelled correctly, and then try
* again.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InvalidLexiconException extends com.amazonaws.services.polly.model.AmazonPollyException {
private static final long serialVersionUID = 1L;
/**
* Constructs a new InvalidLexiconException with the specified error message.
*
* @param message
* Describes the error encountered.
*/
public InvalidLexiconException(String message) {
super(message);
}
}
| apache-2.0 |
HuangLS/neo4j | community/kernel/src/main/java/org/neo4j/kernel/impl/transaction/state/RelationshipDeleter.java | 11277 | /*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.kernel.impl.transaction.state;
import static org.neo4j.kernel.impl.transaction.state.RelationshipCreator.relCount;
import org.neo4j.kernel.impl.locking.Locks;
import org.neo4j.kernel.impl.locking.ResourceTypes;
import org.neo4j.kernel.impl.store.InvalidRecordException;
import org.neo4j.kernel.impl.store.record.NodeRecord;
import org.neo4j.kernel.impl.store.record.Record;
import org.neo4j.kernel.impl.store.record.RelationshipGroupRecord;
import org.neo4j.kernel.impl.store.record.RelationshipRecord;
import org.neo4j.kernel.impl.transaction.state.RecordAccess.RecordProxy;
import org.neo4j.kernel.impl.util.DirectionWrapper;
/**
 * Deletes relationship records within a transaction: removes the relationship's property chain,
 * unlinks the record from the relationship chains of both its nodes (handling sparse nodes as
 * well as dense nodes with per-type relationship groups) and keeps the chain counts that are
 * stored in the first-in-chain records up to date.
 */
public class RelationshipDeleter
{
    // Used to take exclusive locks on neighbouring relationship records before rewriting them.
    private final Locks.Client locks;
    // Resolves the relationship group record for a given dense node and relationship type.
    private final RelationshipGroupGetter relGroupGetter;
    // Removes the property chain attached to the relationship being deleted.
    private final PropertyDeleter propertyChainDeleter;
    public RelationshipDeleter( Locks.Client locks, RelationshipGroupGetter relGroupGetter,
            PropertyDeleter propertyChainDeleter )
    {
        this.locks = locks;
        this.relGroupGetter = relGroupGetter;
        this.propertyChainDeleter = propertyChainDeleter;
    }
    /**
     * Deletes the relationship with the given id: removes its property chain, unlinks it from
     * the relationship chains of both its nodes and marks the record as no longer in use.
     * <p>
     * NOTE(review): the previous doc claimed the connected nodes were assumed deleted and that
     * the removed properties were returned; the method returns nothing — confirm the intended
     * contract with callers.
     *
     * @param id the id of the relationship to delete.
     * @param recordChanges transaction-local record changes that the deletion is recorded into.
     */
    public void relDelete( long id, RecordAccessSet recordChanges )
    {
        RelationshipRecord record = recordChanges.getRelRecords().getOrLoad( id, null ).forChangingLinkage();
        propertyChainDeleter.deletePropertyChain( record, recordChanges.getPropertyRecords() );
        disconnectRelationship( record, recordChanges );
        updateNodesForDeletedRelationship( record, recordChanges );
        record.setInUse( false );
    }
    // Unlinks the relationship from all four neighbouring positions: previous/next in the
    // chain of the start node and previous/next in the chain of the end node.
    private void disconnectRelationship( RelationshipRecord rel, RecordAccessSet recordChangeSet )
    {
        disconnect( rel, RelationshipConnection.START_NEXT, recordChangeSet.getRelRecords() );
        disconnect( rel, RelationshipConnection.START_PREV, recordChangeSet.getRelRecords() );
        disconnect( rel, RelationshipConnection.END_NEXT, recordChangeSet.getRelRecords() );
        disconnect( rel, RelationshipConnection.END_PREV, recordChangeSet.getRelRecords() );
    }
    // Repoints the neighbour on the given side of rel so it no longer references rel.
    // Locks the neighbour before rewriting it; throws if the neighbour does not actually
    // reference rel's node (an inconsistency in the store).
    private void disconnect( RelationshipRecord rel, RelationshipConnection pointer,
            RecordAccess<Long, RelationshipRecord, Void> relChanges )
    {
        long otherRelId = pointer.otherSide().get( rel );
        if ( otherRelId == Record.NO_NEXT_RELATIONSHIP.intValue() )
        {
            // No neighbour on this side (rel is at the end of the chain): nothing to unlink.
            return;
        }
        locks.acquireExclusive( ResourceTypes.RELATIONSHIP, otherRelId );
        RelationshipRecord otherRel = relChanges.getOrLoad( otherRelId, null ).forChangingLinkage();
        boolean changed = false;
        long newId = pointer.get( rel );
        boolean newIsFirst = pointer.isFirstInChain( rel );
        // The neighbour may reference the shared node from either of its two ends (or both,
        // for a loop relationship); update every end that matches.
        if ( otherRel.getFirstNode() == pointer.compareNode( rel ) )
        {
            pointer.start().set( otherRel, newId, newIsFirst );
            changed = true;
        }
        if ( otherRel.getSecondNode() == pointer.compareNode( rel ) )
        {
            pointer.end().set( otherRel, newId, newIsFirst );
            changed = true;
        }
        if ( !changed )
        {
            throw new InvalidRecordException( otherRel + " don't match " + rel );
        }
    }
    // Updates both nodes of the deleted relationship: repoints their first-relationship
    // references (directly for sparse nodes, via the relationship group for dense nodes),
    // removes emptied groups and decrements the per-chain relationship counts. A loop
    // relationship (same start and end node) is only counted once.
    private void updateNodesForDeletedRelationship( RelationshipRecord rel, RecordAccessSet recordChanges )
    {
        RecordProxy<Long, NodeRecord, Void> startNodeChange =
                recordChanges.getNodeRecords().getOrLoad( rel.getFirstNode(), null );
        RecordProxy<Long, NodeRecord, Void> endNodeChange =
                recordChanges.getNodeRecords().getOrLoad( rel.getSecondNode(), null );
        NodeRecord startNode = recordChanges.getNodeRecords().getOrLoad( rel.getFirstNode(), null ).forReadingLinkage();
        NodeRecord endNode = recordChanges.getNodeRecords().getOrLoad( rel.getSecondNode(), null ).forReadingLinkage();
        boolean loop = startNode.getId() == endNode.getId();
        if ( !startNode.isDense() )
        {
            if ( rel.isFirstInFirstChain() )
            {
                // rel was the head of the start node's chain: the node now points at rel's successor.
                startNode = startNodeChange.forChangingLinkage();
                startNode.setNextRel( rel.getFirstNextRel() );
            }
            decrementTotalRelationshipCount( startNode.getId(), rel, startNode.getNextRel(),
                    recordChanges.getRelRecords() );
        }
        else
        {
            // Dense node: the chain head lives in the relationship group record for rel's type.
            RecordProxy<Long, RelationshipGroupRecord, Integer> groupChange =
                    relGroupGetter.getRelationshipGroup( startNode, rel.getType(),
                            recordChanges.getRelGroupRecords() ).group();
            assert groupChange != null : "Relationship group " + rel.getType() + " should have existed here";
            RelationshipGroupRecord group = groupChange.forReadingData();
            DirectionWrapper dir = DirectionIdentifier.wrapDirection( rel, startNode );
            if ( rel.isFirstInFirstChain() )
            {
                group = groupChange.forChangingData();
                dir.setNextRel( group, rel.getFirstNextRel() );
                if ( groupIsEmpty( group ) )
                {
                    deleteGroup( startNodeChange, group, recordChanges.getRelGroupRecords() );
                }
            }
            decrementTotalRelationshipCount( startNode.getId(), rel, dir.getNextRel( group ),
                    recordChanges.getRelRecords() );
        }
        if ( !endNode.isDense() )
        {
            if ( rel.isFirstInSecondChain() )
            {
                endNode = endNodeChange.forChangingLinkage();
                endNode.setNextRel( rel.getSecondNextRel() );
            }
            if ( !loop )
            {
                // For a loop the count was already decremented on the start-node side above.
                decrementTotalRelationshipCount( endNode.getId(), rel, endNode.getNextRel(),
                        recordChanges.getRelRecords() );
            }
        }
        else
        {
            RecordProxy<Long, RelationshipGroupRecord, Integer> groupChange =
                    relGroupGetter.getRelationshipGroup( endNode, rel.getType(),
                            recordChanges.getRelGroupRecords() ).group();
            DirectionWrapper dir = DirectionIdentifier.wrapDirection( rel, endNode );
            assert groupChange != null || loop : "Group has been deleted";
            if ( groupChange != null )
            {
                RelationshipGroupRecord group = groupChange.forReadingData();
                if ( rel.isFirstInSecondChain() )
                {
                    group = groupChange.forChangingData();
                    dir.setNextRel( group, rel.getSecondNextRel() );
                    if ( groupIsEmpty( group ) )
                    {
                        deleteGroup( endNodeChange, group, recordChanges.getRelGroupRecords() );
                    }
                }
            } // Else this is a loop-rel and the group was deleted when dealing with the start node
            if ( !loop )
            {
                decrementTotalRelationshipCount( endNode.getId(), rel, dir.getNextRel( groupChange.forChangingData() ),
                        recordChanges.getRelRecords() );
            }
        }
    }
    // Decrements the relationship count kept for nodeId's chain. The count is stored in the
    // prev-pointer fields of the first record in the chain (see the setFirstPrevRel /
    // setSecondPrevRel calls below, which write relCount(...) - 1). firstRelId is the record
    // that is (now) first in the chain after the deletion. Returns true when the chain is now
    // empty, false otherwise. NOTE(review): all callers currently ignore the return value.
    private boolean decrementTotalRelationshipCount( long nodeId, RelationshipRecord rel, long firstRelId,
            RecordAccess<Long, RelationshipRecord, Void> relRecords )
    {
        if ( firstRelId == Record.NO_PREV_RELATIONSHIP.intValue() )
        {
            return true;
        }
        boolean firstInChain = relIsFirstInChain( nodeId, rel );
        if ( !firstInChain )
        {
            // The deleted record was not the chain head, so the head must be locked before
            // its stored count is rewritten. (The head carries the count for the whole chain.)
            locks.acquireExclusive( ResourceTypes.RELATIONSHIP, firstRelId );
        }
        RelationshipRecord firstRel = relRecords.getOrLoad( firstRelId, null ).forChangingLinkage();
        if ( nodeId == firstRel.getFirstNode() )
        {
            // When the deleted record was the head, the count is read from it; otherwise from
            // the (unchanged) head record.
            firstRel.setFirstPrevRel( firstInChain ?
                    relCount( nodeId, rel )-1 : relCount( nodeId, firstRel ) - 1 );
            firstRel.setFirstInFirstChain( true );
        }
        if ( nodeId == firstRel.getSecondNode() )
        {
            firstRel.setSecondPrevRel( firstInChain ?
                    relCount( nodeId, rel )-1 :
                    relCount( nodeId, firstRel )-1 );
            firstRel.setFirstInSecondChain( true );
        }
        return false;
    }
    // Unlinks an emptied relationship group from the node's doubly-linked list of groups and
    // marks the group record as no longer in use.
    private void deleteGroup( RecordProxy<Long, NodeRecord, Void> nodeChange,
            RelationshipGroupRecord group,
            RecordAccess<Long, RelationshipGroupRecord, Integer> relGroupRecords )
    {
        long previous = group.getPrev();
        long next = group.getNext();
        if ( previous == Record.NO_NEXT_RELATIONSHIP.intValue() )
        { // This is the first one, just point the node to the next group
            nodeChange.forChangingLinkage().setNextRel( next );
        }
        else
        { // There are others before it, point the previous to the next group
            RelationshipGroupRecord previousRecord = relGroupRecords.getOrLoad( previous, null ).forChangingLinkage();
            previousRecord.setNext( next );
        }
        if ( next != Record.NO_NEXT_RELATIONSHIP.intValue() )
        { // There are groups after this one, point that next group to the previous of the group to be deleted
            RelationshipGroupRecord nextRecord = relGroupRecords.getOrLoad( next, null ).forChangingLinkage();
            nextRecord.setPrev( previous );
        }
        group.setInUse( false );
    }
    // A group is empty when it has no outgoing, incoming or loop chain left.
    private boolean groupIsEmpty( RelationshipGroupRecord group )
    {
        return group.getFirstOut() == Record.NO_NEXT_RELATIONSHIP.intValue() &&
                group.getFirstIn() == Record.NO_NEXT_RELATIONSHIP.intValue() &&
                group.getFirstLoop() == Record.NO_NEXT_RELATIONSHIP.intValue();
    }
    // True when rel is the first record of nodeId's chain, on whichever end references nodeId.
    private boolean relIsFirstInChain( long nodeId, RelationshipRecord rel )
    {
        return (nodeId == rel.getFirstNode() && rel.isFirstInFirstChain()) ||
                (nodeId == rel.getSecondNode() && rel.isFirstInSecondChain());
    }
}
| apache-2.0 |
cmhdave/xliff-gradle-plugin | src/main/java/org/oasisopen/xliff/StateQualifierValueList.java | 4767 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.09.03 at 09:19:50 PM EDT
//
package org.oasisopen.xliff;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for state-qualifierValueList.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType name="state-qualifierValueList">
* <restriction base="{http://www.w3.org/2001/XMLSchema}NMTOKEN">
* <enumeration value="exact-match"/>
* <enumeration value="fuzzy-match"/>
* <enumeration value="id-match"/>
* <enumeration value="leveraged-glossary"/>
* <enumeration value="leveraged-inherited"/>
* <enumeration value="leveraged-mt"/>
* <enumeration value="leveraged-repository"/>
* <enumeration value="leveraged-tm"/>
* <enumeration value="mt-suggestion"/>
* <enumeration value="rejected-grammar"/>
* <enumeration value="rejected-inaccurate"/>
* <enumeration value="rejected-length"/>
* <enumeration value="rejected-spelling"/>
* <enumeration value="tm-suggestion"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
// NOTE: this enum is JAXB-generated (see file header); hand edits will be lost on regeneration.
@XmlType(name = "state-qualifierValueList")
@XmlEnum
public enum StateQualifierValueList {
    /**
     * Indicates an exact match. An exact match occurs when a source text of a segment is exactly the same as the source text of a segment that was translated previously.
     *
     */
    @XmlEnumValue("exact-match")
    EXACT_MATCH("exact-match"),
    /**
     * Indicates a fuzzy match. A fuzzy match occurs when a source text of a segment is very similar to the source text of a segment that was translated previously (e.g. when the difference is casing, a few changed words, white-space discripancy, etc.).
     *
     */
    @XmlEnumValue("fuzzy-match")
    FUZZY_MATCH("fuzzy-match"),
    /**
     * Indicates a match based on matching IDs (in addition to matching text).
     *
     */
    @XmlEnumValue("id-match")
    ID_MATCH("id-match"),
    /**
     * Indicates a translation derived from a glossary.
     *
     */
    @XmlEnumValue("leveraged-glossary")
    LEVERAGED_GLOSSARY("leveraged-glossary"),
    /**
     * Indicates a translation derived from existing translation.
     *
     */
    @XmlEnumValue("leveraged-inherited")
    LEVERAGED_INHERITED("leveraged-inherited"),
    /**
     * Indicates a translation derived from machine translation.
     *
     */
    @XmlEnumValue("leveraged-mt")
    LEVERAGED_MT("leveraged-mt"),
    /**
     * Indicates a translation derived from a translation repository.
     *
     */
    @XmlEnumValue("leveraged-repository")
    LEVERAGED_REPOSITORY("leveraged-repository"),
    /**
     * Indicates a translation derived from a translation memory.
     *
     */
    @XmlEnumValue("leveraged-tm")
    LEVERAGED_TM("leveraged-tm"),
    /**
     * Indicates the translation is suggested by machine translation.
     *
     */
    @XmlEnumValue("mt-suggestion")
    MT_SUGGESTION("mt-suggestion"),
    /**
     * Indicates that the item has been rejected because of incorrect grammar.
     *
     */
    @XmlEnumValue("rejected-grammar")
    REJECTED_GRAMMAR("rejected-grammar"),
    /**
     * Indicates that the item has been rejected because it is incorrect.
     *
     */
    @XmlEnumValue("rejected-inaccurate")
    REJECTED_INACCURATE("rejected-inaccurate"),
    /**
     * Indicates that the item has been rejected because it is too long or too short.
     *
     */
    @XmlEnumValue("rejected-length")
    REJECTED_LENGTH("rejected-length"),
    /**
     * Indicates that the item has been rejected because of incorrect spelling.
     *
     */
    @XmlEnumValue("rejected-spelling")
    REJECTED_SPELLING("rejected-spelling"),
    /**
     * Indicates the translation is suggested by translation memory.
     *
     */
    @XmlEnumValue("tm-suggestion")
    TM_SUGGESTION("tm-suggestion");
    // The XML token (schema value) represented by this constant.
    private final String value;
    StateQualifierValueList(String v) {
        value = v;
    }
    /** Returns the XML token (e.g. "exact-match") for this constant. */
    public String value() {
        return value;
    }
    /**
     * Looks up the constant whose XML token equals {@code v}.
     *
     * @throws IllegalArgumentException if no constant carries the given token.
     */
    public static StateQualifierValueList fromValue(String v) {
        for (StateQualifierValueList c: StateQualifierValueList.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
| apache-2.0 |
dkpro/dkpro-lab | dkpro-lab-core/src/main/java/org/dkpro/lab/engine/ExecutionException.java | 1388 | /*******************************************************************************
* Copyright 2011
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.dkpro.lab.engine;
import org.dkpro.lab.task.Task;
/**
 * Signals a failure raised while the main execution step of a {@link Task} is run inside a
 * {@link TaskExecutionEngine}.
 */
public class ExecutionException
    extends Exception
{
    private static final long serialVersionUID = -6251644756208619215L;

    /**
     * Create an exception carrying only a detail message.
     *
     * @param aMessage the detail message.
     */
    public ExecutionException(String aMessage)
    {
        super(aMessage);
    }

    /**
     * Create an exception carrying a detail message and the underlying cause.
     *
     * @param aMessage the detail message.
     * @param aCause the underlying cause.
     */
    public ExecutionException(String aMessage, Throwable aCause)
    {
        super(aMessage, aCause);
    }

    /**
     * Create an exception that merely wraps an underlying cause.
     *
     * @param aCause the underlying cause.
     */
    public ExecutionException(Throwable aCause)
    {
        super(aCause);
    }
}
| apache-2.0 |
onegambler/XML-Data-Modifier | src/main/java/com/xmldatamodifier/core/ContentReplaceRule.java | 1602 | package com.xmldatamodifier.core;
import com.google.common.base.Strings;

import java.util.Objects;

import static java.util.Objects.requireNonNull;
public class ContentReplaceRule extends ContentRule {

    /** Wildcard that means "replace the entire content" instead of a regex substitution. */
    private static final String STAR_REGEX = "*";

    private final String regex;
    private final String replacement;

    /**
     * Creates a rule that replaces every match of {@code regex} with {@code replacement}.
     *
     * @param regex regular expression to match, or {@code "*"} to replace the whole content.
     * @param replacement replacement string (may contain {@code $n} group references).
     * @throws NullPointerException if either argument is {@code null}.
     */
    public ContentReplaceRule(String regex, String replacement) {
        // Assign directly from requireNonNull instead of discarding its return value.
        this.regex = requireNonNull(regex, "Regex string cannot be null");
        this.replacement = requireNonNull(replacement, "Replacement string cannot be null");
    }

    /**
     * Applies the rule to {@code content}; {@code null} content is treated as the empty string.
     */
    @Override
    public String elaborate(String content) {
        // "*" short-circuits: the whole content is replaced regardless of its value.
        if (STAR_REGEX.equals(regex)) {
            return replacement;
        }
        return Strings.nullToEmpty(content).replaceAll(regex, replacement);
    }

    @Override
    public String toString() {
        return "ContentReplaceRule{" + "regex='" + regex + '\'' + ", replacement='" + replacement + '\'' + '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ContentReplaceRule that = (ContentReplaceRule) o;
        // Objects.equals replaces the hand-rolled null checks (fields are non-null anyway,
        // enforced by the constructor).
        return Objects.equals(regex, that.regex) && Objects.equals(replacement, that.replacement);
    }

    @Override
    public int hashCode() {
        // Consistent with equals; hash values are not part of any persisted contract.
        return Objects.hash(regex, replacement);
    }
}
| apache-2.0 |
leafclick/intellij-community | jps/jps-builders/src/org/jetbrains/jps/builders/java/dependencyView/Mappings.java | 127337 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.jps.builders.java.dependencyView;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.EnumeratorIntegerDescriptor;
import gnu.trove.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.jps.builders.storage.BuildDataCorruptedException;
import org.jetbrains.jps.incremental.relativizer.PathRelativizerService;
import org.jetbrains.jps.incremental.storage.PathStringDescriptor;
import org.jetbrains.jps.service.JpsServiceManager;
import org.jetbrains.org.objectweb.asm.ClassReader;
import org.jetbrains.org.objectweb.asm.Opcodes;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.annotation.RetentionPolicy;
import java.util.*;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.function.Supplier;
/**
* @author: db
*/
public class Mappings {
private final static Logger LOG = Logger.getInstance(Mappings.class);
// System property escape hatch: when set to "true", constant changes are handled non-incrementally.
public static final String PROCESS_CONSTANTS_NON_INCREMENTAL_PROPERTY = "compiler.process.constants.non.incremental";
private final boolean myProcessConstantsIncrementally = !Boolean.valueOf(System.getProperty(PROCESS_CONSTANTS_NON_INCREMENTAL_PROPERTY, "false"));

// File names of the persistent tables kept under myRootDir (non-delta mappings only).
private final static String CLASS_TO_SUBCLASSES = "classToSubclasses.tab";
private final static String CLASS_TO_CLASS = "classToClass.tab";
private final static String SHORT_NAMES = "shortNames.tab";
private final static String SOURCE_TO_CLASS = "sourceToClass.tab";
private final static String CLASS_TO_SOURCE = "classToSource.tab";
private static final int DEFAULT_SET_CAPACITY = 32;
private static final float DEFAULT_SET_LOAD_FACTOR = 0.98f;
private static final String IMPORT_WILDCARD_SUFFIX = ".*";

// true for a short-lived in-memory delta produced by createDelta(); false for the persistent base.
private final boolean myIsDelta;
private boolean myIsDifferentiated = false;
private boolean myIsRebuild = false;

// Delta-only bookkeeping (null in the persistent base mappings).
private final TIntHashSet myChangedClasses;
private final THashSet<File> myChangedFiles;
private final Set<Pair<ClassFileRepr, File>> myDeletedClasses;
private final Set<ClassRepr> myAddedClasses;

private final Object myLock;
private final File myRootDir;

private DependencyContext myContext;
// Interned well-known names, resolved once via myContext.
private final int myInitName;
private final int myEmptyName;
private final int myObjectClassName;

private LoggerWrapper<Integer> myDebugS;

private IntIntMultiMaplet myClassToSubclasses;

/**
 key: the name of a class who is used;
 values: class names that use the class registered as the key
 */
private IntIntMultiMaplet myClassToClassDependency;
private ObjectObjectMultiMaplet<String, ClassFileRepr> myRelativeSourceFilePathToClasses;
private IntObjectMultiMaplet<String> myClassToRelativeSourceFilePath;
/**
 * [short className] -> list of FQ names
 */
private IntIntMultiMaplet myShortClassNameIndex;

// Populated only while differentiating a delta; see getRemovedSuperClasses/getAddedSuperClasses.
private IntIntTransientMultiMaplet myRemovedSuperClasses;
private IntIntTransientMultiMaplet myAddedSuperClasses;

@Nullable
private Collection<String> myRemovedFiles;

/** @return the service used to convert between absolute and project-relative paths. */
public PathRelativizerService getRelativizer() {
  return myRelativizer;
}

private final PathRelativizerService myRelativizer;
/**
 * Creates an in-memory delta on top of {@code base}. The delta shares the base's lock,
 * dependency context and debug logger, and keeps its own transient change-tracking sets.
 */
private Mappings(final Mappings base) throws IOException {
  myLock = base.myLock;
  myIsDelta = true;
  myChangedClasses = new TIntHashSet(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
  myChangedFiles = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
  myDeletedClasses = new HashSet<>(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
  myAddedClasses = new HashSet<>(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
  // The delta's root dir is nested under the base's; only relevant if it ever goes to disk.
  myRootDir = new File(FileUtil.toSystemIndependentName(base.myRootDir.getAbsolutePath()) + File.separatorChar + "myDelta");
  myContext = base.myContext;
  myInitName = myContext.get("<init>");
  myEmptyName = myContext.get("");
  myObjectClassName = myContext.get("java/lang/Object");
  myDebugS = base.myDebugS;
  myRelativizer = base.myRelativizer;
  createImplementation();
}
/**
 * Creates the persistent base mappings stored under {@code rootDir}.
 * Change-tracking sets stay null: they are only used by deltas.
 * Note: interned names are resolved AFTER createImplementation(), which sets up myContext.
 */
public Mappings(final File rootDir, PathRelativizerService relativizer) throws IOException {
  myLock = new Object();
  myIsDelta = false;
  myChangedClasses = null;
  myChangedFiles = null;
  myDeletedClasses = null;
  myAddedClasses = null;
  myRootDir = rootDir;
  myRelativizer = relativizer;
  createImplementation();
  myInitName = myContext.get("<init>");
  myEmptyName = myContext.get("");
  myObjectClassName = myContext.get("java/lang/Object");
}
/**
 * (Re-)creates the backing maplets. A delta keeps everything in transient in-memory maplets,
 * while the base mappings are backed by persistent tables under {@code myRootDir}.
 * If any map fails to initialize, the already-opened ones are closed before rethrowing.
 */
private void createImplementation() throws IOException {
  try {
    if (!myIsDelta) {
      myContext = new DependencyContext(myRootDir, myRelativizer);
      myDebugS = myContext.getLogger(LOG);
    }
    // Super-class add/remove diffs are only tracked while differentiating, i.e. in a delta.
    myRemovedSuperClasses = myIsDelta ? new IntIntTransientMultiMaplet() : null;
    myAddedSuperClasses = myIsDelta ? new IntIntTransientMultiMaplet() : null;
    final CollectionFactory<String> fileCollectionFactory = new CollectionFactory<String>() {
      @Override
      public Collection<String> create() {
        return new THashSet<>(FileUtil.PATH_HASHING_STRATEGY); // todo: do we really need set and not a list here?
      }
    };
    if (myIsDelta) {
      myClassToSubclasses = new IntIntTransientMultiMaplet();
      myClassToClassDependency = new IntIntTransientMultiMaplet();
      // The short-name index is maintained only for the persistent base.
      myShortClassNameIndex = null;
      myRelativeSourceFilePathToClasses = new ObjectObjectTransientMultiMaplet<>(FileUtil.PATH_HASHING_STRATEGY, () -> new THashSet<>(5, DEFAULT_SET_LOAD_FACTOR));
      myClassToRelativeSourceFilePath = new IntObjectTransientMultiMaplet<>(fileCollectionFactory);
    }
    else {
      // NOTE(review): a previous "if (myIsDelta) { myRootDir.mkdirs(); }" guard here was
      // unreachable (this is the !myIsDelta branch) and has been removed as dead code.
      // DependencyContext above already operates on myRootDir for the persistent case.
      myClassToSubclasses = new IntIntPersistentMultiMaplet(DependencyContext.getTableFile(myRootDir, CLASS_TO_SUBCLASSES),
                                                            EnumeratorIntegerDescriptor.INSTANCE);
      myClassToClassDependency = new IntIntPersistentMultiMaplet(DependencyContext.getTableFile(myRootDir, CLASS_TO_CLASS),
                                                                 EnumeratorIntegerDescriptor.INSTANCE);
      myShortClassNameIndex = myIsDelta? null : new IntIntPersistentMultiMaplet(DependencyContext.getTableFile(myRootDir, SHORT_NAMES),
                                                                                EnumeratorIntegerDescriptor.INSTANCE);
      myRelativeSourceFilePathToClasses = new ObjectObjectPersistentMultiMaplet<String, ClassFileRepr>(
        DependencyContext.getTableFile(myRootDir, SOURCE_TO_CLASS), PathStringDescriptor.INSTANCE, new ClassFileReprExternalizer(myContext),
        () -> new THashSet<>(5, DEFAULT_SET_LOAD_FACTOR)
      ) {
        @NotNull
        @Override
        protected String debugString(String path) {
          // on case-insensitive file systems save paths in normalized (lowercase) format in order to make tests run deterministically
          return SystemInfo.isFileSystemCaseSensitive ? path : path.toLowerCase(Locale.US);
        }
      };
      myClassToRelativeSourceFilePath = new IntObjectPersistentMultiMaplet<>(
        DependencyContext.getTableFile(myRootDir, CLASS_TO_SOURCE), EnumeratorIntegerDescriptor.INSTANCE, PathStringDescriptor.INSTANCE, fileCollectionFactory
      );
    }
  }
  catch (Throwable e) {
    try {
      // ensure already initialized maps are properly closed
      close();
    }
    catch (Throwable ignored) {
    }
    throw e;
  }
}
/** Resolves an interned name id back to its string form. */
public String valueOf(final int name) {
  return myContext.getValue(name);
}
/** Interns {@code string} in the dependency context and returns its numeric id. */
public int getName(final String string) {
  return myContext.get(string);
}
/**
 * Creates a fresh in-memory delta sharing this mappings' context and lock.
 * An IOException during delta setup is surfaced as BuildDataCorruptedException,
 * the standard failure type for broken build caches.
 */
public Mappings createDelta() {
  synchronized (myLock) {
    try {
      return new Mappings(this);
    }
    catch (IOException e) {
      throw new BuildDataCorruptedException(e);
    }
  }
}
/**
 * Ensures every successfully compiled source has an (at least empty) classes entry,
 * so sources whose output classes disappeared are still recorded as "known, but empty".
 * Files that failed compilation are deliberately left untouched.
 */
private void compensateRemovedContent(final @NotNull Collection<? extends File> compiled, final @NotNull Collection<? extends File> compiledWithErrors) {
  for (final File source : compiled) {
    if (compiledWithErrors.contains(source)) {
      continue;
    }
    final String relativePath = toRelative(source);
    if (!myRelativeSourceFilePathToClasses.containsKey(relativePath)) {
      myRelativeSourceFilePathToClasses.put(relativePath, new HashSet<>());
    }
  }
}
/**
 * Looks up the representation of {@code qName} and returns it only if it is a class
 * (as opposed to a module descriptor); otherwise null.
 */
@Nullable
private ClassRepr getClassReprByName(final @Nullable File source, final int qName) {
  final ClassFileRepr repr = getReprByName(source, qName);
  if (repr instanceof ClassRepr) {
    return (ClassRepr)repr;
  }
  return null;
}
/**
 * Finds the stored representation of class/module {@code qName}.
 * If {@code source} is given, only that file is consulted; otherwise all sources
 * currently associated with the name are scanned.
 */
@Nullable
private ClassFileRepr getReprByName(@Nullable File source, int qName) {
  final Collection<File> candidates = source != null ? Collections.singleton(source) : classToSourceFileGet(qName);
  if (candidates == null) {
    return null;
  }
  for (File candidate : candidates) {
    final Collection<ClassFileRepr> reprs = sourceFileToClassesGet(candidate);
    if (reprs == null) {
      continue;
    }
    for (ClassFileRepr repr : reprs) {
      if (repr.name == qName) {
        return repr;
      }
    }
  }
  return null;
}
/** Returns all class/module representations recorded for the given source file (keyed by relative path). */
private Collection<ClassFileRepr> sourceFileToClassesGet(File unchangedSource) {
  return myRelativeSourceFilePathToClasses.get(toRelative(unchangedSource));
}
/** Converts a file to the project-relative path form used as map keys. */
@NotNull
private String toRelative(File file) {
  return myRelativizer.toRelative(file.getAbsolutePath());
}
/**
 * Returns the source files that class {@code qName} was compiled from,
 * or null when the class is unknown. Paths are stored relative and expanded on the fly.
 */
@Nullable
private Collection<File> classToSourceFileGet(int qName) {
  final Collection<String> relativePaths = myClassToRelativeSourceFilePath.get(qName);
  if (relativePaths == null) {
    return null;
  }
  return ContainerUtil.map(relativePaths, this::toFull);
}
/** Expands a stored project-relative path back into an absolute File. */
@NotNull
private File toFull(String relativePath) {
  return new File(myRelativizer.toFull(relativePath));
}
/**
 * Wipes all stored dependency data: closes the persistent maps, deletes the
 * storage directory and recreates empty maps. Order matters: maps must be
 * closed before their backing files are deleted.
 */
public void clean() throws IOException {
  if (myRootDir != null) {
    synchronized (myLock) {
      close();
      FileUtil.delete(myRootDir);
      createImplementation();
    }
  }
}
/** Super-classes removed per class during differentiation (delta only; null on the base). */
public IntIntTransientMultiMaplet getRemovedSuperClasses() {
  return myRemovedSuperClasses;
}

/** Super-classes added per class during differentiation (delta only; null on the base). */
public IntIntTransientMultiMaplet getAddedSuperClasses() {
  return myAddedSuperClasses;
}
// Deferred work scheduled during differentiation, executed once by runPostPasses().
private final LinkedBlockingQueue<Runnable> myPostPasses = new LinkedBlockingQueue<>();

/**
 * Runs all queued post-differentiation passes. Before running them, classes that were
 * really deleted (no remaining source file — i.e. not merely moved to another file)
 * are dropped from the changed-classes set so they are not processed as "changed".
 */
private void runPostPasses() {
  final Set<Pair<ClassFileRepr, File>> deleted = myDeletedClasses;
  if (deleted != null) {
    for (Pair<ClassFileRepr, File> pair : deleted) {
      final int deletedClassName = pair.first.name;
      final Collection<File> sources = classToSourceFileGet(deletedClassName);
      if (sources == null || sources.isEmpty()) { // if really deleted and not e.g. moved
        myChangedClasses.remove(deletedClassName);
      }
    }
  }
  // Drain the queue; passes may not enqueue further passes after this point.
  for (Runnable pass = myPostPasses.poll(); pass != null; pass = myPostPasses.poll()) {
    pass.run();
  }
}
// Sentinels used to mark an unresolved (library) super member in override lookups;
// consumers must check for these null placeholders.
private static final ClassRepr MOCK_CLASS = null;
private static final MethodRepr MOCK_METHOD = null;

/** Predicate over class members, used by the member-access propagation walk. */
private interface MemberComparator {
  boolean isSame(ProtoMember member);
}
private class Util {
// When non-null, lookups consult this (delta) mappings first, then fall back to the enclosing one.
@Nullable
private final Mappings myMappings;

/** Utility bound to the enclosing mappings only. */
private Util() {
  myMappings = null;
}

/** Utility that consults {@code mappings} (typically a delta) before the enclosing mappings. */
private Util(@NotNull Mappings mappings) {
  myMappings = mappings;
}
/** Convenience overload: appends dependents of {@code c} by its name. */
TIntHashSet appendDependents(final ClassFileRepr c, final TIntHashSet result) {
  return appendDependents(c.name, result);
}
/**
 * Adds every class that depends on {@code className} to {@code result}.
 * Returns the raw dependents set from the map (null when nothing depends on the class).
 */
@Nullable
TIntHashSet appendDependents(int className, TIntHashSet result) {
  final TIntHashSet dependents = myClassToClassDependency.get(className);
  if (dependents == null) {
    return null;
  }
  addAll(result, dependents);
  return dependents;
}
/**
 * Walks the subclass tree of {@code reflcass} collecting classes through which a member
 * (matched by {@code comparator}) remains accessible. Propagation stops at any subclass
 * that redeclares the member. {@code root} skips the member check at the starting class.
 */
void propagateMemberAccessRec(final TIntHashSet acc, final boolean isField, final boolean root, final MemberComparator comparator, final int reflcass) {
  final ClassRepr repr = classReprByName(reflcass);
  if (repr != null) {
    if (!root) {
      final Set<? extends ProtoMember> members = isField ? repr.getFields() : repr.getMethods();
      for (ProtoMember m : members) {
        if (comparator.isSame(m)) {
          // member is redeclared here: it shadows the one we track, stop descending
          return;
        }
      }
      if (!acc.add(reflcass)) {
        return; // SOE prevention
      }
    }
    final TIntHashSet subclasses = myClassToSubclasses.get(reflcass);
    if (subclasses != null) {
      subclasses.forEach(subclass -> {
        propagateMemberAccessRec(acc, isField, false, comparator, subclass);
        return true;
      });
    }
  }
}
/** Entry point for the member-access walk starting at {@code className}; returns the collected classes. */
TIntHashSet propagateMemberAccess(final boolean isField, final MemberComparator comparator, final int className) {
  final TIntHashSet acc = new TIntHashSet(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
  propagateMemberAccessRec(acc, isField, true, comparator, className);
  return acc;
}
/** Classes through which field {@code name} of {@code className} is still visible (fields match by name only). */
TIntHashSet propagateFieldAccess(final int name, final int className) {
  return propagateMemberAccess(true, member -> member.name == name, className);
}

/** Classes through which method {@code m} of {@code className} is still visible (full signature match). */
TIntHashSet propagateMethodAccess(final MethodRepr m, final int className) {
  return propagateMemberAccess(false, member -> m.equals(member), className);
}
/**
 * Predicate matching methods that are "less specific" than {@code than}: same name,
 * same arity, and each argument type is (possibly) a supertype of the corresponding
 * argument of {@code than}. Constructors never match. An unknown subtype relation
 * (isSubtypeOf returns null) is treated permissively.
 */
MethodRepr.Predicate lessSpecific(final MethodRepr than) {
  return new MethodRepr.Predicate() {
    @Override
    public boolean satisfy(final MethodRepr m) {
      if (m.name == myInitName || m.name != than.name || m.myArgumentTypes.length != than.myArgumentTypes.length) {
        return false;
      }
      for (int i = 0; i < than.myArgumentTypes.length; i++) {
        final Boolean subtypeOf = isSubtypeOf(than.myArgumentTypes[i], m.myArgumentTypes[i]);
        if (subtypeOf != null && !subtypeOf) {
          return false;
        }
      }
      return true;
    }
  };
}
/**
 * Collects (method, class) pairs from subclasses of {@code fromClass} that override
 * methods matched by {@code predicate}. Descent into a subtree stops at the first class
 * that provides a visible match. {@code visitedClasses} guards against cycles/SOE.
 */
private void addOverridingMethods(final MethodRepr m, final ClassRepr fromClass, final MethodRepr.Predicate predicate, final Collection<? super Pair<MethodRepr, ClassRepr>> container, TIntHashSet visitedClasses) {
  if (m.name == myInitName) {
    return; // overriding is not defined for constructors
  }
  final TIntHashSet subClasses = myClassToSubclasses.get(fromClass.name);
  if (subClasses == null) {
    return;
  }
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
  }
  if (!visitedClasses.add(fromClass.name)) {
    return;
  }
  final TIntHashSet _visitedClasses = visitedClasses;
  subClasses.forEach(subClassName -> {
    final ClassRepr r = classReprByName(subClassName);
    if (r != null) {
      boolean cont = true;
      final Collection<MethodRepr> methods = r.findMethods(predicate);
      for (MethodRepr mm : methods) {
        if (isVisibleIn(fromClass, m, r)) {
          container.add(Pair.create(mm, r));
          cont = false;
        }
      }
      if (cont) {
        // no visible match in this subclass: keep searching deeper
        addOverridingMethods(m, r, predicate, container, _visitedClasses);
      }
    }
    return true;
  });
}
/**
 * Finds, both up and down the hierarchy of {@code c}, all methods less specific than
 * {@code m} — candidates whose overload resolution could be affected by m's change.
 */
private Collection<Pair<MethodRepr, ClassRepr>> findAllMethodsBySpecificity(final MethodRepr m, final ClassRepr c) {
  final MethodRepr.Predicate predicate = lessSpecific(m);
  final Collection<Pair<MethodRepr, ClassRepr>> result = new HashSet<>();
  addOverridenMethods(c, predicate, result, null);
  addOverridingMethods(m, c, predicate, result, null);
  return result;
}
/**
 * Methods in superclasses of {@code c} that {@code m} overrides (Java override rules).
 * Empty for constructors, for which overriding is undefined.
 */
private Collection<Pair<MethodRepr, ClassRepr>> findOverriddenMethods(final MethodRepr m, final ClassRepr c) {
  if (m.name == myInitName) {
    return Collections.emptySet(); // overriding is not defined for constructors
  }
  final Collection<Pair<MethodRepr, ClassRepr>> result = new HashSet<>();
  addOverridenMethods(c, MethodRepr.equalByJavaRules(m), result, null);
  return result;
}
/**
 * True if any (transitive) superclass of {@code fromClass} declares a method matching
 * {@code predicate} that is visible in {@code fromClass}. java/lang/Object is skipped.
 */
private boolean hasOverriddenMethods(final ClassRepr fromClass, final MethodRepr.Predicate predicate, TIntHashSet visitedClasses) {
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
    visitedClasses.add(fromClass.name);
  }
  for (int superName : fromClass.getSupers()) {
    if (!visitedClasses.add(superName) || superName == myObjectClassName) {
      continue;
    }
    final ClassRepr superClass = classReprByName(superName);
    if (superClass != null) {
      for (MethodRepr mm : superClass.findMethods(predicate)) {
        if (isVisibleIn(superClass, mm, fromClass)) {
          return true;
        }
      }
      if (hasOverriddenMethods(superClass, predicate, visitedClasses)) {
        return true;
      }
    }
  }
  return false;
}
/**
 * True if the hierarchy of {@code fromClass} reaches a class with no stored representation —
 * i.e. a library class outside the compiled sources (Object itself excluded).
 */
private boolean extendsLibraryClass(final ClassRepr fromClass, TIntHashSet visitedClasses) {
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
    visitedClasses.add(fromClass.name);
  }
  for (int superName : fromClass.getSupers()) {
    if (!visitedClasses.add(superName) || superName == myObjectClassName) {
      continue;
    }
    final ClassRepr superClass = classReprByName(superName);
    // null repr means the super is not from our sources => library class
    if (superClass == null || extendsLibraryClass(superClass, visitedClasses)) {
      return true;
    }
  }
  return false;
}
/**
 * Collects (method, class) pairs from superclasses of {@code fromClass} matching
 * {@code predicate} and visible in {@code fromClass}. An unresolved (library) super
 * contributes the MOCK_METHOD/MOCK_CLASS placeholder pair. Ascent along a branch stops
 * once a visible match is found there.
 */
private void addOverridenMethods(final ClassRepr fromClass, final MethodRepr.Predicate predicate, final Collection<? super Pair<MethodRepr, ClassRepr>> container, TIntHashSet visitedClasses) {
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
    visitedClasses.add(fromClass.name);
  }
  for (int superName : fromClass.getSupers()) {
    if (!visitedClasses.add(superName) || superName == myObjectClassName) {
      continue; // prevent SOE
    }
    final ClassRepr superClass = classReprByName(superName);
    if (superClass != null) {
      boolean cont = true;
      final Collection<MethodRepr> methods = superClass.findMethods(predicate);
      for (MethodRepr mm : methods) {
        if (isVisibleIn(superClass, mm, fromClass)) {
          container.add(Pair.create(mm, superClass));
          cont = false;
        }
      }
      if (cont) {
        addOverridenMethods(superClass, predicate, container, visitedClasses);
      }
    }
    else {
      // library super: signal "might override something unknown" via the mock pair
      container.add(Pair.create(MOCK_METHOD, MOCK_CLASS));
    }
  }
}
/**
 * Collects (field, class) pairs for fields named like {@code f} declared in superclasses
 * of {@code fromClass} and visible there. A branch stops ascending once a match is found.
 */
void addOverriddenFields(final FieldRepr f, final ClassRepr fromClass, final Collection<? super Pair<FieldRepr, ClassRepr>> container, TIntHashSet visitedClasses) {
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
    visitedClasses.add(fromClass.name);
  }
  for (int supername : fromClass.getSupers()) {
    if (!visitedClasses.add(supername) || supername == myObjectClassName) {
      continue;
    }
    final ClassRepr superClass = classReprByName(supername);
    if (superClass != null) {
      final FieldRepr ff = superClass.findField(f.name);
      if (ff != null && isVisibleIn(superClass, ff, fromClass)) {
        container.add(Pair.create(ff, superClass));
      }
      else{
        addOverriddenFields(f, superClass, container, visitedClasses);
      }
    }
  }
}
/**
 * True if any (transitive) superclass of {@code fromClass} declares a field named like
 * {@code f} that is visible in {@code fromClass}. java/lang/Object is skipped.
 */
boolean hasOverriddenFields(final FieldRepr f, final ClassRepr fromClass, TIntHashSet visitedClasses) {
  if (visitedClasses == null) {
    visitedClasses = new TIntHashSet();
    visitedClasses.add(fromClass.name);
  }
  for (int superName : fromClass.getSupers()) {
    if (!visitedClasses.add(superName) || superName == myObjectClassName) {
      continue;
    }
    final ClassRepr parent = classReprByName(superName);
    if (parent == null) {
      continue;
    }
    final FieldRepr existing = parent.findField(f.name);
    if (existing != null && isVisibleIn(parent, existing, fromClass)) {
      return true;
    }
    if (hasOverriddenFields(f, parent, visitedClasses)) {
      return true;
    }
  }
  return false;
}
/** Resolves {@code name} and returns it only if it is a class representation. */
@Nullable
ClassRepr classReprByName(final int name) {
  final ClassFileRepr r = reprByName(name);
  return r instanceof ClassRepr? (ClassRepr)r : null;
}
/** Resolves {@code name} and returns it only if it is a module representation. */
@Nullable
ModuleRepr moduleReprByName(final int name) {
  final ClassFileRepr r = reprByName(name);
  return r instanceof ModuleRepr? (ModuleRepr)r : null;
}
/**
 * Resolves {@code name}: the bound delta mappings (if any) win over the enclosing
 * mappings, so freshly compiled state shadows the persisted one.
 */
@Nullable
ClassFileRepr reprByName(final int name) {
  if (myMappings != null) {
    final ClassFileRepr r = myMappings.getReprByName(null, name);
    if (r != null) {
      return r;
    }
  }
  return getReprByName(null, name);
}
/**
 * Tri-state inheritance check: TRUE if {@code who} is {@code whom} or inherits from it;
 * null when no positive answer could be established (possibly due to unresolved supers).
 * Note: never returns FALSE — callers must treat null as "unknown/no".
 */
@Nullable
private Boolean isInheritorOf(final int who, final int whom, TIntHashSet visitedClasses) {
  if (who == whom) {
    return Boolean.TRUE;
  }
  final ClassRepr repr = classReprByName(who);
  if (repr != null) {
    if (visitedClasses == null) {
      visitedClasses = new TIntHashSet();
      visitedClasses.add(who);
    }
    for (int s : repr.getSupers()) {
      if (!visitedClasses.add(s)) {
        continue;
      }
      final Boolean inheritorOf = isInheritorOf(s, whom, visitedClasses);
      if (inheritorOf != null && inheritorOf) {
        return inheritorOf;
      }
    }
  }
  return null;
}
/**
 * Tri-state subtype check between type representations: arrays are covariant in their
 * element type and are subtypes of Object/Cloneable/Serializable; class-to-class falls
 * back to the inheritance check (which may return null = unknown).
 */
@Nullable
Boolean isSubtypeOf(final TypeRepr.AbstractType who, final TypeRepr.AbstractType whom) {
  if (who.equals(whom)) {
    return Boolean.TRUE;
  }
  if (who instanceof TypeRepr.PrimitiveType || whom instanceof TypeRepr.PrimitiveType) {
    return Boolean.FALSE;
  }
  if (who instanceof TypeRepr.ArrayType) {
    if (whom instanceof TypeRepr.ArrayType) {
      return isSubtypeOf(((TypeRepr.ArrayType)who).elementType, ((TypeRepr.ArrayType)whom).elementType);
    }
    // NOTE(review): JVM field descriptors normally carry a trailing ';'
    // (e.g. "Ljava/lang/Object;"); verify getDescr()'s format — if it includes the ';',
    // these comparisons can never match. TODO confirm.
    final String descr = whom.getDescr(myContext);
    if (descr.equals("Ljava/lang/Cloneable") || descr.equals("Ljava/lang/Object") || descr.equals("Ljava/io/Serializable")) {
      return Boolean.TRUE;
    }
    return Boolean.FALSE;
  }
  if (whom instanceof TypeRepr.ClassType) {
    return isInheritorOf(((TypeRepr.ClassType)who).className, ((TypeRepr.ClassType)whom).className, null);
  }
  return Boolean.FALSE;
}
/**
 * True if {@code m} is visible in {@code classRepr}: either declared there or inherited
 * from a superclass under Java override rules.
 * (The predicate is built once instead of twice as before — equalByJavaRules is a
 * pure predicate factory — and the size()>0 check is replaced by the isEmpty() idiom.)
 */
boolean isMethodVisible(final ClassRepr classRepr, final MethodRepr m) {
  final MethodRepr.Predicate sameByJavaRules = MethodRepr.equalByJavaRules(m);
  return !classRepr.findMethods(sameByJavaRules).isEmpty() || hasOverriddenMethods(classRepr, sameByJavaRules, null);
}
/**
 * True if {@code field} is visible in class {@code className}: declared there, inherited
 * from a superclass, or the class itself is unknown (conservatively assumed visible).
 */
boolean isFieldVisible(final int className, final FieldRepr field) {
  final ClassRepr repr = classReprByName(className);
  if (repr == null) {
    return true; // unknown class: assume visible
  }
  if (repr.getFields().contains(field)) {
    return true;
  }
  return hasOverriddenFields(field, repr, null);
}
/**
 * Adds all transitive superclasses/interfaces of {@code className} to {@code container}.
 * Recurses only when new supers were actually added, which also terminates cycles.
 */
void collectSupersRecursively(final int className, @NotNull final TIntHashSet container) {
  final ClassRepr classRepr = classReprByName(className);
  if (classRepr != null) {
    final int[] supers = classRepr.getSupers();
    if (container.addAll(supers)) {
      for (int aSuper : supers) {
        collectSupersRecursively(aSuper, container);
      }
    }
  }
}
/**
 * Marks all sources of {@code className} and (recursively) of its subclasses as affected,
 * registers their dependents, and — when {@code usages} is set — records a class usage
 * so that references to these classes are re-checked.
 */
void affectSubclasses(final int className,
                      final Collection<? super File> affectedFiles,
                      final Collection<? super UsageRepr.Usage> affectedUsages,
                      final TIntHashSet dependants,
                      final boolean usages,
                      final Collection<? extends File> alreadyCompiledFiles,
                      TIntHashSet visitedClasses) {
  debug("Affecting subclasses of class: ", className);
  final Collection<File> allSources = classToSourceFileGet(className);
  if (allSources == null || allSources.isEmpty()) {
    debug("No source file detected for class ", className);
    debug("End of affectSubclasses");
    return;
  }
  for (File fName : allSources) {
    debug("Source file name: ", fName);
    if (!alreadyCompiledFiles.contains(fName)) {
      affectedFiles.add(fName);
    }
  }
  if (usages) {
    debug("Class usages affection requested");
    final ClassRepr classRepr = classReprByName(className);
    if (classRepr != null) {
      debug("Added class usage for ", classRepr.name);
      affectedUsages.add(classRepr.createUsage());
    }
  }
  appendDependents(className, dependants);
  final TIntHashSet directSubclasses = myClassToSubclasses.get(className);
  if (directSubclasses != null) {
    if (visitedClasses == null) {
      visitedClasses = new TIntHashSet();
      visitedClasses.add(className);
    }
    final TIntHashSet _visitedClasses = visitedClasses;
    directSubclasses.forEach(subClass -> {
      if (_visitedClasses.add(subClass)) {
        affectSubclasses(subClass, affectedFiles, affectedUsages, dependants, usages, alreadyCompiledFiles, _visitedClasses);
      }
      return true;
    });
  }
}
/**
 * Registers usages of {@code field} as affected: the root usage plus one usage per class
 * in {@code classes} (typically the propagation set), preserving the assign-vs-read kind
 * of the root usage. Dependents of each class are accumulated as well.
 */
void affectFieldUsages(final FieldRepr field, final TIntHashSet classes, final UsageRepr.Usage rootUsage, final Set<? super UsageRepr.Usage> affectedUsages, final TIntHashSet dependents) {
  affectedUsages.add(rootUsage);
  classes.forEach(p -> {
    appendDependents(p, dependents);
    debug("Affect field usage referenced of class ", p);
    affectedUsages.add(rootUsage instanceof UsageRepr.FieldAssignUsage ? field.createAssignUsage(myContext, p) : field.createUsage(myContext, p));
    return true;
  });
}
/**
 * Affects static-import usages of {@code memberName}: against the declaring owner and
 * against every class in {@code classes} through which the member may be imported.
 */
void affectStaticMemberImportUsages(final int memberName, int ownerName, final TIntHashSet classes, final Set<? super UsageRepr.Usage> affectedUsages, final TIntHashSet dependents) {
  debug("Affect static member import usage referenced of class ", ownerName);
  affectedUsages.add(UsageRepr.createImportStaticMemberUsage(myContext, memberName, ownerName));
  classes.forEach(cls -> {
    appendDependents(cls, dependents);
    debug("Affect static member import usage referenced of class ", cls);
    affectedUsages.add(UsageRepr.createImportStaticMemberUsage(myContext, memberName, cls));
    return true;
  });
}
/**
 * Affects on-demand static import usages ({@code import static X.*}) of {@code ownerClass}
 * and of every class in {@code classes} through which members may be re-exported.
 */
void affectStaticMemberOnDemandUsages(int ownerClass, final TIntHashSet classes, final Set<? super UsageRepr.Usage> affectedUsages, final TIntHashSet dependents) {
  debug("Affect static member on-demand import usage referenced of class ", ownerClass);
  affectedUsages.add(UsageRepr.createImportStaticOnDemandUsage(myContext, ownerClass));
  classes.forEach(cls -> {
    appendDependents(cls, dependents);
    debug("Affect static member on-demand import usage referenced of class ", cls);
    affectedUsages.add(UsageRepr.createImportStaticOnDemandUsage(myContext, cls));
    return true;
  });
}
/**
 * Affects usages of every method of {@code aClass} whose throws-clause mentions
 * {@code exceptionClass}; dependents are added only if at least one such method exists.
 */
void affectMethodUsagesThrowing(ClassRepr aClass, TypeRepr.ClassType exceptionClass, final Set<? super UsageRepr.Usage> affectedUsages, final TIntHashSet dependents) {
  boolean shouldAffect = false;
  for (MethodRepr method : aClass.getMethods()) {
    if (method.myExceptions.contains(exceptionClass)) {
      shouldAffect = true;
      affectedUsages.add(method.createUsage(myContext, aClass.name));
    }
  }
  if (shouldAffect) {
    if (myDebugS.isDebugEnabled()) {
      debug("Affecting usages of methods throwing "+ myContext.getValue(exceptionClass.className) + " exception; class ", aClass.name);
    }
    appendDependents(aClass, dependents);
  }
}
/**
 * Affects usages of {@code method}: the root usage plus one per subclass through which
 * the method may be reached, preserving the meta-vs-plain kind of the root usage.
 */
void affectMethodUsages(final MethodRepr method, final TIntHashSet subclasses, final UsageRepr.Usage rootUsage, final Set<? super UsageRepr.Usage> affectedUsages, final TIntHashSet dependents) {
  affectedUsages.add(rootUsage);
  if (subclasses != null) {
    subclasses.forEach(p -> {
      appendDependents(p, dependents);
      debug("Affect method usage referenced of class ", p);
      final UsageRepr.Usage usage =
        rootUsage instanceof UsageRepr.MetaMethodUsage ? method.createMetaUsage(myContext, p) : method.createUsage(myContext, p);
      affectedUsages.add(usage);
      return true;
    });
  }
}
/**
 * Schedules the sources of module {@code m} for recompilation; the delta's source mapping
 * (if bound) is preferred over the enclosing one.
 */
void affectModule(ModuleRepr m, final Collection<? super File> affectedFiles) {
  Collection<File> depFiles = myMappings != null ? myMappings.classToSourceFileGet(m.name) : null;
  if (depFiles == null) {
    depFiles = classToSourceFileGet(m.name);
  }
  if (depFiles != null) {
    debug("Affecting module ", m.name);
    affectedFiles.addAll(depFiles);
  }
}
/**
 * Affects modules depending on {@code moduleName}: records a module usage (merging
 * constraints if one was already registered) and marks module dependents. When
 * {@code checkTransitive} is set, the walk follows 'requires transitive' edges.
 */
void affectDependentModules(Differential.DiffState state, final int moduleName, @Nullable UsageConstraint constraint, boolean checkTransitive) {
  new Object() {
    final TIntHashSet visited = new TIntHashSet();

    void perform(final int modName) {
      final TIntHashSet depNames = myClassToClassDependency.get(modName);
      if (depNames != null && !depNames.isEmpty()) {
        final TIntHashSet next = new TIntHashSet();
        final UsageRepr.Usage moduleUsage = UsageRepr.createModuleUsage(myContext, modName);
        state.myAffectedUsages.add(moduleUsage);
        // merge with a previously registered constraint for the same usage, if any
        final UsageConstraint prevConstraint = state.myUsageConstraints.put(moduleUsage, constraint == null? UsageConstraint.ANY : constraint);
        if (prevConstraint != null) {
          state.myUsageConstraints.put(moduleUsage, prevConstraint.or(constraint));
        }
        depNames.forEach(depName -> {
          if (visited.add(depName)) {
            final ClassFileRepr depRepr = reprByName(depName);
            if (depRepr instanceof ModuleRepr) {
              state.myDependants.add(depName);
              if (checkTransitive && ((ModuleRepr)depRepr).requiresTransitevely(modName)) {
                next.add(depName);
              }
            }
          }
          return true;
        });
        next.forEach(m -> {
          perform(m);
          return true;
        });
      }
    }
  }.perform(moduleName);
}
/**
 * Usage constraint accepting a residence class if at least one of its source files passes
 * the given filter; classes with no known sources are accepted conservatively.
 */
public class FileFilterConstraint implements UsageConstraint {
  @NotNull
  private final DependentFilesFilter myFilter;

  public FileFilterConstraint(@NotNull DependentFilesFilter filter) {
    myFilter = filter;
  }

  @Override
  public boolean checkResidence(int residence) {
    final Collection<File> fNames = classToSourceFileGet(residence);
    if (fNames == null || fNames.isEmpty()) {
      return true;
    }
    for (File fName : fNames) {
      if (myFilter.accept(fName)) {
        return true;
      }
    }
    return false;
  }
}
/**
 * Usage constraint accepting only classes OUTSIDE the given package
 * (used to limit affection of package-local members to foreign packages).
 */
public class PackageConstraint implements UsageConstraint {
  public final String packageName;

  public PackageConstraint(final String packageName) {
    this.packageName = packageName;
  }

  @Override
  public boolean checkResidence(final int residence) {
    return !ClassRepr.getPackageName(myContext.getValue(residence)).equals(packageName);
  }
}
/**
 * Usage constraint accepting classes that are neither inheritors of {@code rootClass}
 * nor in its package (i.e. classes for which a protected member is NOT accessible).
 */
public class InheritanceConstraint extends PackageConstraint {
  public final int rootClass;

  public InheritanceConstraint(ClassRepr rootClass) {
    super(rootClass.getPackageName());
    this.rootClass = rootClass.name;
  }

  @Override
  public boolean checkResidence(final int residence) {
    final Boolean inheritorOf = isInheritorOf(residence, rootClass, null);
    return (inheritorOf == null || !inheritorOf) && super.checkResidence(residence);
  }
}
}
/**
 * Marks every dependent of {@code className} as affected. Self-dependencies (sources
 * equal to {@code sourceFile}) are skipped; if at least one source of a dependent class
 * is eligible (not yet compiled, accepted by the filter), ALL sources associated with
 * that class are affected together, since they must be recompiled as a unit.
 */
void affectAll(final int className,
               @NotNull final File sourceFile,
               final Collection<? super File> affectedFiles,
               final Collection<? extends File> alreadyCompiledFiles,
               @Nullable final DependentFilesFilter filter) {
  final TIntHashSet dependants = myClassToClassDependency.get(className);
  if (dependants != null) {
    dependants.forEach(depClass -> {
      final Collection<File> allSources = classToSourceFileGet(depClass);
      if (allSources == null || allSources.isEmpty()) {
        return true;
      }
      boolean shouldAffect = false;
      for (File depFile : allSources) {
        if (FileUtil.filesEqual(depFile, sourceFile)) {
          continue; // skipping self-dependencies
        }
        if (!alreadyCompiledFiles.contains(depFile) && (filter == null || filter.accept(depFile))) {
          // if at least one of the source files associated with the class is affected, all other associated sources should be affected as well
          shouldAffect = true;
          break;
        }
      }
      if (shouldAffect) {
        for (File depFile : allSources) {
          if (!FileUtil.filesEqual(depFile, sourceFile)) {
            affectedFiles.add(depFile);
          }
        }
      }
      return true;
    });
  }
}
/**
 * True if member {@code m}, declared in {@code c}, is visible from {@code scope}:
 * private members only within the same class, package-local only within the same package.
 */
private static boolean isVisibleIn(final ClassRepr c, final ProtoMember m, final ClassRepr scope) {
  final boolean hiddenByPrivacy = m.isPrivate() && c.name != scope.name;
  final boolean hiddenByPackage = m.isPackageLocal() && !c.getPackageName().equals(scope.getPackageName());
  return !(hiddenByPrivacy || hiddenByPackage);
}
/** True if the interned name id denotes the empty string. */
private boolean isEmpty(final int s) {
  return s == myEmptyName;
}
/** All transitive subclasses of {@code root}, including root itself. */
@NotNull
private TIntHashSet getAllSubclasses(final int root) {
  return addAllSubclasses(root, new TIntHashSet(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR));
}

/**
 * Recursive helper: adds {@code root} and its subclass tree into {@code acc}.
 * The add() check doubles as the cycle guard.
 */
private TIntHashSet addAllSubclasses(final int root, final TIntHashSet acc) {
  if (!acc.add(root)) {
    return acc;
  }
  final TIntHashSet directSubclasses = myClassToSubclasses.get(root);
  if (directSubclasses != null) {
    directSubclasses.forEach(s -> {
      addAllSubclasses(s, acc);
      return true;
    });
  }
  return acc;
}
/**
 * Decides whether a change to {@code member} of class/package {@code owner} can be handled
 * incrementally. Public members force a full rebuild (returns false). For protected members
 * all relevant subclasses are scheduled; for package-local access all classes of the same
 * package are scheduled. Already-compiled and non-existing files are filtered out.
 *
 * @return false to force a non-incremental rebuild, true when affected files were collected.
 */
private boolean incrementalDecision(final int owner,
                                    final Proto member,
                                    final Collection<? super File> affectedFiles,
                                    final Collection<? extends File> currentlyCompiled,
                                    @Nullable final DependentFilesFilter filter) {
  final boolean isField = member instanceof FieldRepr;
  final Util self = new Util();
  // Public branch --- hopeless
  if (member.isPublic()) {
    debug("Public access, switching to a non-incremental mode");
    return false;
  }
  final THashSet<File> toRecompile = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
  // Protected branch
  if (member.isProtected()) {
    debug("Protected access, softening non-incremental decision: adding all relevant subclasses for a recompilation");
    debug("Root class: ", owner);
    final TIntHashSet propagated = self.propagateFieldAccess(isField ? member.name : myEmptyName, owner);
    propagated.forEach(className -> {
      final Collection<File> fileNames = classToSourceFileGet(className);
      if (fileNames != null) {
        for (File fileName : fileNames) {
          debug("Adding ", fileName);
        }
        toRecompile.addAll(fileNames);
      }
      return true;
    });
  }
  // For fields the package is derived from the owner class, for other members from the member itself.
  final String packageName = ClassRepr.getPackageName(myContext.getValue(isField ? owner : member.name));
  debug("Softening non-incremental decision: adding all package classes for a recompilation");
  debug("Package name: ", packageName);
  // Package-local branch
  myClassToRelativeSourceFilePath.forEachEntry(new TIntObjectProcedure<Collection<String>>() {
    @Override
    public boolean execute(int className, Collection<String> relFilePaths) {
      if (ClassRepr.getPackageName(myContext.getValue(className)).equals(packageName)) {
        for (String rel : relFilePaths) {
          File file = toFull(rel);
          if (filter == null || filter.accept(file)) {
            debug("Adding: ", rel);
            toRecompile.add(file);
          }
        }
      }
      return true;
    }
  });
  // filtering already compiled and non-existing paths
  toRecompile.removeAll(currentlyCompiled);
  for (Iterator<File> it = toRecompile.iterator(); it.hasNext(); ) {
    final File file = it.next();
    if (!file.exists()) {
      it.remove();
    }
  }
  affectedFiles.addAll(toRecompile);
  return true;
}
/** Caller-supplied filter restricting which dependent files may be marked for recompilation. */
public interface DependentFilesFilter {
  /** @return true if the file is eligible to be affected. */
  boolean accept(File file);

  /** @return true if the file belongs to the set of targets being compiled right now. */
  boolean belongsToCurrentTargetChunk(File file);
}
  /**
   * Compares the current dependency mappings with a delta built from the just-compiled
   * output and collects the source files affected by the detected changes.
   * NOTE(review): summary inferred from the visible process* methods — confirm against the full class.
   */
  private class Differential {
    // Only ACC_FINAL is checked here; together with hasValue()/hadValue() this identifies
    // (per the debug messages below) "final static" fields whose values may be inlined by the compiler.
    private static final int INLINABLE_FIELD_MODIFIERS_MASK = Opcodes.ACC_FINAL;
    // Mappings produced by the current compilation round.
    final Mappings myDelta;
    final Collection<? extends File> myFilesToCompile;
    final Collection<? extends File> myCompiledFiles;
    final Collection<? extends File> myCompiledWithErrors;
    // Output: source files to be additionally recompiled.
    final Collection<? super File> myAffectedFiles;
    @Nullable
    final DependentFilesFilter myFilter;
    // myFuture is built over the delta (new state), myPresent over the current mappings (old state).
    final Util myFuture;
    final Util myPresent;
    final boolean myEasyMode; // true means: no need to search for affected files, only preprocess data for integrate
    // Pluggable trackers deciding whether annotation changes require extra recompilation.
    private final Iterable<AnnotationsChangeTracker> myAnnotationChangeTracker =
      JpsServiceManager.getInstance().getExtensions(AnnotationsChangeTracker.class);
private class FileClasses {
final File myFileName;
final Set<ClassRepr> myFileClasses = new THashSet<>();
final Set<ModuleRepr> myFileModules = new THashSet<>();
FileClasses(File fileName, Collection<ClassFileRepr> fileContent) {
myFileName = fileName;
for (ClassFileRepr repr : fileContent) {
if (repr instanceof ClassRepr) {
myFileClasses.add((ClassRepr)repr);
}
else {
myFileModules.add((ModuleRepr)repr);
}
}
}
}
    /**
     * Mutable accumulator for the results of one differentiation pass.
     */
    private class DiffState {
      // Internal (int) names of classes depending on the changed classes.
      final public TIntHashSet myDependants = new TIntHashSet(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
      // Usages that, when present in a dependent file, cause that file to be recompiled.
      final public Set<UsageRepr.Usage> myAffectedUsages = new HashSet<>();
      // Queries describing annotation usages affected by annotation attribute changes.
      final public Set<UsageRepr.AnnotationUsage> myAnnotationQuery = new HashSet<>();
      // Optional constraints narrowing where an affected usage actually triggers recompilation.
      final public Map<UsageRepr.Usage, UsageConstraint> myUsageConstraints = new HashMap<>();
      final Difference.Specifier<ClassRepr, ClassRepr.Diff> myClassDiff;
      final Difference.Specifier<ModuleRepr, ModuleRepr.Diff> myModulesDiff;
      DiffState(Difference.Specifier<ClassRepr, ClassRepr.Diff> classDiff, Difference.Specifier<ModuleRepr, ModuleRepr.Diff> modulesDiff) {
        myClassDiff = classDiff;
        myModulesDiff = modulesDiff;
      }
    }
    /**
     * Rebuild mode: marks the delta as a full rebuild; no affected-file search is performed
     * (easy mode), so no Util views and no output collections are created.
     */
    private Differential(final Mappings delta) {
      this.myDelta = delta;
      this.myFilesToCompile = null;
      this.myCompiledFiles = null;
      this.myCompiledWithErrors = null;
      this.myAffectedFiles = null;
      this.myFilter = null;
      myFuture = null;
      myPresent = null;
      myEasyMode = true;
      delta.myIsRebuild = true;
    }
    /**
     * Easy mode with known removed files and files to compile: data is preprocessed for
     * later integration, but affected files are not searched for (no output collections).
     */
    private Differential(final Mappings delta, final Collection<String> removed, final Collection<? extends File> filesToCompile) {
      delta.myRemovedFiles = removed;
      this.myDelta = delta;
      this.myFilesToCompile = filesToCompile;
      this.myCompiledFiles = null;
      this.myCompiledWithErrors = null;
      this.myAffectedFiles = null;
      this.myFilter = null;
      myFuture = new Util(delta);
      myPresent = new Util();
      myEasyMode = true;
    }
    /**
     * Full differentiation mode: diffs old vs. new state and fills {@code affectedFiles}
     * with sources requiring recompilation, consulting {@code filter} where applicable.
     */
    private Differential(final Mappings delta,
                         final Collection<String> removed,
                         final Collection<? extends File> filesToCompile,
                         final Collection<? extends File> compiledWithErrors,
                         final Collection<? extends File> compiledFiles,
                         final Collection<? super File> affectedFiles,
                         @NotNull final DependentFilesFilter filter) {
      delta.myRemovedFiles = removed;
      this.myDelta = delta;
      this.myFilesToCompile = filesToCompile;
      this.myCompiledFiles = compiledFiles;
      this.myCompiledWithErrors = compiledWithErrors;
      this.myAffectedFiles = affectedFiles;
      this.myFilter = filter;
      myFuture = new Util(delta);
      myPresent = new Util();
      myEasyMode = false;
    }
    /**
     * Handles source files removed since the previous round: compensates their removed
     * content in the delta and, in full mode, affects all usages of every class that was
     * registered for a removed source file.
     */
    private void processDisappearedClasses() {
      if (myFilesToCompile != null) {
        // Account for content that disappeared from the files being recompiled.
        myDelta.compensateRemovedContent(
          myFilesToCompile, myCompiledWithErrors != null ? myCompiledWithErrors : Collections.emptySet()
        );
      }
      // In easy mode no affected-file search is performed (see myEasyMode).
      if (!myEasyMode) {
        final Collection<String> removed = myDelta.myRemovedFiles;
        if (removed != null) {
          for (final String file : removed) {
            final File sourceFile = new File(file);
            final Collection<ClassFileRepr> classes = sourceFileToClassesGet(sourceFile);
            if (classes != null) {
              for (ClassFileRepr c : classes) {
                debug("Affecting usages of removed class ", c.name);
                affectAll(c.name, sourceFile, myAffectedFiles, myCompiledFiles, myFilter);
              }
            }
          }
        }
      }
    }
    /**
     * Reacts to methods added to a changed class: conservatively affects subclasses,
     * usages of potentially overridden or less specific methods, and inner subclasses
     * where the new method may shadow an outer member. No-op for annotation classes.
     */
    private void processAddedMethods(final DiffState state, final ClassRepr.Diff diff, final ClassRepr it) {
      final Collection<MethodRepr> added = diff.methods().added();
      if (added.isEmpty()) {
        return;
      }
      debug("Processing added methods: ");
      if (it.isAnnotation()) {
        debug("Class is annotation, skipping method analysis");
        return;
      }
      assert myFuture != null;
      assert myPresent != null;
      assert myAffectedFiles != null;
      // Class state before this round; lazily computed, may be null for a newly added class.
      final Supplier<ClassRepr> oldClassRepr = lazy(() -> getClassReprByName(null, it.name));
      for (final MethodRepr m : added) {
        debug("Method: ", m.name);
        if (!m.isPrivate() && (it.isInterface() || it.isAbstract() || m.isAbstract())) {
          debug("Class is abstract, or is interface, or added non-private method is abstract => affecting all subclasses");
          myFuture.affectSubclasses(it.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, false, myCompiledFiles, null);
        }
        final Supplier<TIntHashSet> propagated = lazy(()-> myFuture.propagateMethodAccess(m, it.name));
        if (!m.isPrivate()) {
          final ClassRepr oldRepr = oldClassRepr.get();
          // Only if nothing in the old hierarchy already overrode an equal method.
          if (oldRepr == null || !myPresent.hasOverriddenMethods(oldRepr, MethodRepr.equalByJavaRules(m), null)) {
            if (m.myArgumentTypes.length > 0) {
              debug("Conservative case on overriding methods, affecting method usages");
              // do not propagate constructors access, since constructors are always concrete and not accessible via references to subclasses
              myFuture.affectMethodUsages(m, m.name == myInitName? null : propagated.get(), m.createMetaUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
            }
          }
        }
        if (!m.isPrivate()) {
          if (m.isStatic()) {
            myFuture.affectStaticMemberOnDemandUsages(it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
          }
          // Usages of existing methods that are "less specific" than the new one may now resolve differently.
          final Collection<MethodRepr> lessSpecific = it.findMethods(myFuture.lessSpecific(m));
          final Collection<MethodRepr> removed = diff.methods().removed();
          for (final MethodRepr mm : lessSpecific) {
            if (!mm.equals(m) && !removed.contains(mm)) {
              debug("Found less specific method, affecting method usages");
              myFuture.affectMethodUsages(mm, propagated.get(), mm.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
            }
          }
          debug("Processing affected by specificity methods");
          final Collection<Pair<MethodRepr, ClassRepr>> affectedMethods = myFuture.findAllMethodsBySpecificity(m, it);
          final MethodRepr.Predicate overrides = MethodRepr.equalByJavaRules(m);
          for (final Pair<MethodRepr, ClassRepr> pair : affectedMethods) {
            final MethodRepr method = pair.first;
            final ClassRepr methodClass = pair.second;
            if (methodClass == MOCK_CLASS) {
              continue;
            }
            final Boolean inheritorOf = myPresent.isInheritorOf(methodClass.name, it.name, null);
            final boolean isInheritor = inheritorOf != null && inheritorOf;
            debug("Method: ", method.name);
            debug("Class : ", methodClass.name);
            if (overrides.satisfy(method) && isInheritor) {
              // The new method overrides a method declared in a subclass: recompile that subclass.
              debug("Current method overrides that found");
              final Collection<File> files = classToSourceFileGet(methodClass.name);
              if (files != null) {
                myAffectedFiles.addAll(files);
                for (File file : files) {
                  debug("Affecting file ", file);
                }
              }
            }
            else {
              debug("Current method does not override that found");
              final TIntHashSet yetPropagated = myPresent.propagateMethodAccess(method, it.name);
              if (isInheritor) {
                myPresent.appendDependents(methodClass, state.myDependants);
                myFuture.affectMethodUsages(method, yetPropagated, method.createUsage(myContext, methodClass.name), state.myAffectedUsages, state.myDependants);
              }
              debug("Affecting method usages for that found");
              myFuture.affectMethodUsages(method, yetPropagated, method.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
            }
          }
          // An added method visible in an outer class may change name resolution inside inner subclasses.
          final TIntHashSet subClasses = getAllSubclasses(it.name);
          subClasses.forEach(subClass -> {
            final ClassRepr r = myFuture.classReprByName(subClass);
            if (r == null) {
              return true;
            }
            final Collection<File> sourceFileNames = classToSourceFileGet(subClass);
            if (sourceFileNames != null && !myCompiledFiles.containsAll(sourceFileNames)) {
              final int outerClass = r.getOuterClassName();
              if (!isEmpty(outerClass)) {
                final ClassRepr outerClassRepr = myFuture.classReprByName(outerClass);
                if (outerClassRepr != null && (myFuture.isMethodVisible(outerClassRepr, m) || myFuture.extendsLibraryClass(outerClassRepr, null))) {
                  myAffectedFiles.addAll(sourceFileNames);
                  for (File sourceFileName : sourceFileNames) {
                    debug("Affecting file due to local overriding: ", sourceFileName);
                  }
                }
              }
            }
            return true;
          });
        }
      }
      debug("End of added methods processing");
    }
    /**
     * Reacts to methods removed from a changed class: affects usages of the removed method
     * (unless a "clearly" equivalent overridden method still satisfies callers), recompiles
     * overriding subclasses, and handles subclasses left without a concrete implementation.
     */
    private void processRemovedMethods(final DiffState state, final ClassRepr.Diff diff, final ClassRepr it) {
      final Collection<MethodRepr> removed = diff.methods().removed();
      if (removed.isEmpty()) {
        return;
      }
      assert myFuture != null;
      assert myPresent != null;
      assert myAffectedFiles != null;
      assert myCompiledFiles != null;
      debug("Processing removed methods:");
      for (final MethodRepr m : removed) {
        debug("Method ", m.name);
        final Collection<Pair<MethodRepr, ClassRepr>> overriddenMethods = myFuture.findOverriddenMethods(m, it);
        final Supplier<TIntHashSet> propagated = lazy(()-> myFuture.propagateMethodAccess(m, it.name));
        if (!m.isPrivate() && m.isStatic()) {
          debug("The method was static --- affecting static method import usages");
          myFuture.affectStaticMemberImportUsages(m.name, it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
        }
        if (overriddenMethods.size() == 0) {
          debug("No overridden methods found, affecting method usages");
          myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
        }
        else {
          // "clear" = every overridden method is a drop-in replacement (same type, no generic
          // signatures involved, not less accessible); otherwise callers must be recompiled.
          boolean clear = true;
          loop:
          for (final Pair<MethodRepr, ClassRepr> overriden : overriddenMethods) {
            final MethodRepr mm = overriden.first;
            if (mm == MOCK_METHOD || !mm.myType.equals(m.myType) || !isEmpty(mm.signature) || !isEmpty(m.signature) || m.isMoreAccessibleThan(mm)) {
              clear = false;
              break loop;
            }
          }
          if (!clear) {
            debug("No clearly overridden methods found, affecting method usages");
            myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
          }
        }
        // Subclasses that overrode the removed method must be recompiled.
        final Collection<Pair<MethodRepr, ClassRepr>> overridingMethods = new HashSet<>();
        myFuture.addOverridingMethods(m, it, MethodRepr.equalByJavaRules(m), overridingMethods, null);
        for (final Pair<MethodRepr, ClassRepr> p : overridingMethods) {
          final Collection<File> fNames = classToSourceFileGet(p.second.name);
          if (fNames != null) {
            myAffectedFiles.addAll(fNames);
            for (File fName : fNames) {
              debug("Affecting file by overriding: ", fName);
            }
          }
        }
        if (!m.isAbstract() && !m.isStatic()) {
          propagated.get().forEach(p -> {
            if (p != it.name) {
              final ClassRepr s = myFuture.classReprByName(p);
              if (s != null) {
                final Collection<Pair<MethodRepr, ClassRepr>> overridenInS = myFuture.findOverriddenMethods(m, s);
                overridenInS.addAll(overriddenMethods);
                boolean allAbstract = true;
                boolean visited = false;
                for (final Pair<MethodRepr, ClassRepr> pp : overridenInS) {
                  final ClassRepr cc = pp.second;
                  if (cc == MOCK_CLASS) {
                    visited = true;
                    continue;
                  }
                  if (cc.name == it.name) {
                    continue;
                  }
                  visited = true;
                  allAbstract = pp.first.isAbstract() || cc.isInterface();
                  if (!allAbstract) {
                    break;
                  }
                }
                if (allAbstract && visited) {
                  final Collection<File> sources = classToSourceFileGet(p);
                  if (sources != null && !myCompiledFiles.containsAll(sources)) {
                    myAffectedFiles.addAll(sources);
                    debug("Removed method is not abstract & overrides some abstract method which is not then over-overridden in subclass ", p);
                    for (File source : sources) {
                      debug("Affecting subclass source file ", source);
                    }
                  }
                }
              }
            }
            return true;
          });
        }
      }
      debug("End of removed methods processing");
    }
    /**
     * Reacts to changed methods: annotation-default removal, return-type/signature/throws
     * changes, access-modifier changes (with usage constraints where the change is benign
     * inside a package or hierarchy), and annotation changes reported by extensions.
     */
    private void processChangedMethods(final DiffState state, final ClassRepr.Diff diff, final ClassRepr it) {
      final Collection<Pair<MethodRepr, MethodRepr.Diff>> changed = diff.methods().changed();
      if (changed.isEmpty()) {
        return;
      }
      debug("Processing changed methods:");
      assert myFuture != null;
      assert myAffectedFiles != null;
      for (final Pair<MethodRepr, MethodRepr.Diff> mr : changed) {
        final MethodRepr m = mr.first;
        final MethodRepr.Diff d = mr.second;
        final boolean throwsChanged = !d.exceptions().unchanged();
        debug("Method: ", m.name);
        if (it.isAnnotation()) {
          if (d.defaultRemoved()) {
            // An annotation attribute lost its default: affect annotation usages not specifying it.
            debug("Class is annotation, default value is removed => adding annotation query");
            final TIntHashSet l = new TIntHashSet(DEFAULT_SET_CAPACITY, DEFAULT_SET_LOAD_FACTOR);
            l.add(m.name);
            final UsageRepr.AnnotationUsage annotationUsage = (UsageRepr.AnnotationUsage)UsageRepr
              .createAnnotationUsage(myContext, TypeRepr.createClassType(myContext, it.name), l, null);
            state.myAnnotationQuery.add(annotationUsage);
          }
        }
        else if (d.base() != Difference.NONE || throwsChanged) {
          final Supplier<TIntHashSet> propagated = lazy(()-> myFuture.propagateMethodAccess(m, it.name));
          boolean affected = false;    // usages already added for this method
          boolean constrained = false; // usages already carry a narrowing constraint
          final Set<UsageRepr.Usage> usages = new THashSet<>();
          if (d.packageLocalOn()) {
            debug("Method became package-private, affecting method usages outside the package");
            myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), usages, state.myDependants);
            for (final UsageRepr.Usage usage : usages) {
              state.myUsageConstraints.put(usage, myFuture.new PackageConstraint(it.getPackageName()));
            }
            state.myAffectedUsages.addAll(usages);
            affected = true;
            constrained = true;
          }
          if ((d.base() & Difference.TYPE) != 0 || (d.base() & Difference.SIGNATURE) != 0 || throwsChanged) {
            if (!affected) {
              debug("Return type, throws list or signature changed --- affecting method usages");
              myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), usages, state.myDependants);
              final List<Pair<MethodRepr, ClassRepr>> overridingMethods = new LinkedList<>();
              myFuture.addOverridingMethods(m, it, MethodRepr.equalByJavaRules(m), overridingMethods, null);
              for(final Pair<MethodRepr, ClassRepr> p : overridingMethods) {
                final ClassRepr aClass = p.getSecond();
                if (aClass != MOCK_CLASS) {
                  final Collection<File> fileNames = classToSourceFileGet(aClass.name);
                  if (fileNames != null) {
                    myAffectedFiles.addAll(fileNames);
                  }
                }
              }
              state.myAffectedUsages.addAll(usages);
              affected = true;
            }
          }
          else if ((d.base() & Difference.ACCESS) != 0) {
            if ((d.addedModifiers() & (Opcodes.ACC_STATIC | Opcodes.ACC_PRIVATE | Opcodes.ACC_SYNTHETIC | Opcodes.ACC_BRIDGE)) != 0 ||
                (d.removedModifiers() & Opcodes.ACC_STATIC) != 0) {
              // When synthetic or bridge flags are added, this effectively means that explicitly written in the code
              // method with the same signature and return type has been removed and a bridge method has been generated instead.
              // In some cases (e.g. using raw types) the presence of such synthetic methods in the bytecode is ignored by the compiler
              // so that the code that called such method via raw type reference might not compile anymore => to be on the safe side
              // we should recompile all places where the method was used
              if (!affected) {
                debug("Added {static | private | synthetic | bridge} specifier or removed static specifier --- affecting method usages");
                myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), usages, state.myDependants);
                state.myAffectedUsages.addAll(usages);
                affected = true;
              }
              if ((d.addedModifiers() & Opcodes.ACC_STATIC) != 0) {
                debug("Added static specifier --- affecting subclasses");
                myFuture.affectSubclasses(it.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, false, myCompiledFiles, null);
                if (!m.isPrivate()) {
                  debug("Added static modifier --- affecting static member on-demand import usages");
                  myFuture.affectStaticMemberOnDemandUsages(it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
                }
              }
              else if ((d.removedModifiers() & Opcodes.ACC_STATIC) != 0) {
                if (!m.isPrivate()) {
                  debug("Removed static modifier --- affecting static method import usages");
                  myFuture.affectStaticMemberImportUsages(m.name, it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
                }
              }
            }
            else {
              if ((d.addedModifiers() & Opcodes.ACC_FINAL) != 0 ||
                  (d.addedModifiers() & Opcodes.ACC_PUBLIC) != 0 ||
                  (d.addedModifiers() & Opcodes.ACC_ABSTRACT) != 0) {
                debug("Added final, public or abstract specifier --- affecting subclasses");
                myFuture.affectSubclasses(it.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, false, myCompiledFiles, null);
              }
              if ((d.addedModifiers() & Opcodes.ACC_PROTECTED) != 0 && (d.removedModifiers() & Opcodes.ACC_PRIVATE) == 0) {
                if (!constrained) {
                  debug("Added public or package-private method became protected --- affect method usages with protected constraint");
                  if (!affected) {
                    myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), usages, state.myDependants);
                    state.myAffectedUsages.addAll(usages);
                    affected = true;
                  }
                  // Usages within the inheritance hierarchy still see the protected member.
                  for (final UsageRepr.Usage usage : usages) {
                    state.myUsageConstraints.put(usage, myFuture.new InheritanceConstraint(it));
                  }
                  constrained = true;
                }
              }
            }
          }
          if ((d.base() & Difference.ANNOTATIONS) != 0) {
            final Set<AnnotationsChangeTracker.Recompile> toRecompile = EnumSet.noneOf(AnnotationsChangeTracker.Recompile.class);
            for (AnnotationsChangeTracker extension : myAnnotationChangeTracker) {
              if (toRecompile.containsAll(AnnotationsChangeTracker.RECOMPILE_ALL)) {
                break;
              }
              final Set<AnnotationsChangeTracker.Recompile> actions = extension.methodAnnotationsChanged(myContext, m, d.annotations(), d.parameterAnnotations());
              if (actions.contains(AnnotationsChangeTracker.Recompile.USAGES)) {
                debug("Extension "+extension.getClass().getName()+" requested recompilation because of changes in annotations list --- affecting method usages");
              }
              if (actions.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
                debug("Extension "+extension.getClass().getName()+" requested recompilation because of changes in method annotations or method parameter annotations list --- affecting subclasses");
              }
              toRecompile.addAll(actions);
            }
            if (toRecompile.contains(AnnotationsChangeTracker.Recompile.USAGES)) {
              myFuture.affectMethodUsages(m, propagated.get(), m.createUsage(myContext, it.name), usages, state.myDependants);
              state.myAffectedUsages.addAll(usages);
              if (constrained) {
                // remove any constraints so that all usages of this method are recompiled
                for (UsageRepr.Usage usage : usages) {
                  state.myUsageConstraints.remove(usage);
                }
              }
            }
            if (toRecompile.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
              myFuture.affectSubclasses(it.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, false, myCompiledFiles, null);
            }
          }
        }
      }
      debug("End of changed methods processing");
    }
    /**
     * Reacts to fields added to a changed class: recompiles subclasses where the new field
     * may hide locals/outer fields, affects field usages propagated to subclasses, and
     * affects usages of fields the new one hides/overrides in superclasses.
     * Always returns {@code true} (differentiation continues).
     */
    private boolean processAddedFields(final DiffState state, final ClassRepr.Diff diff, final ClassRepr classRepr) {
      final Collection<FieldRepr> added = diff.fields().added();
      if (added.isEmpty()) {
        return true;
      }
      debug("Processing added fields");
      assert myFuture != null;
      assert myPresent != null;
      assert myCompiledFiles != null;
      assert myAffectedFiles != null;
      for (final FieldRepr f : added) {
        debug("Field: ", f.name);
        if (!f.isPrivate()) {
          final TIntHashSet subClasses = getAllSubclasses(classRepr.name);
          subClasses.forEach(subClass -> {
            final ClassRepr r = myFuture.classReprByName(subClass);
            if (r != null) {
              final Collection<File> sourceFileNames = classToSourceFileGet(subClass);
              if (sourceFileNames != null && !myCompiledFiles.containsAll(sourceFileNames)) {
                if (r.isLocal()) {
                  for (File sourceFileName : sourceFileNames) {
                    debug("Affecting local subclass (introduced field can potentially hide surrounding method parameters/local variables): ", sourceFileName);
                  }
                  myAffectedFiles.addAll(sourceFileNames);
                }
                else {
                  final int outerClass = r.getOuterClassName();
                  if (!isEmpty(outerClass) && myFuture.isFieldVisible(outerClass, f)) {
                    for (File sourceFileName : sourceFileNames) {
                      debug("Affecting inner subclass (introduced field can potentially hide surrounding class fields): ", sourceFileName);
                    }
                    myAffectedFiles.addAll(sourceFileNames);
                  }
                }
              }
            }
            debug("Affecting field usages referenced from subclass ", subClass);
            final TIntHashSet propagated = myFuture.propagateFieldAccess(f.name, subClass);
            myFuture.affectFieldUsages(f, propagated, f.createUsage(myContext, subClass), state.myAffectedUsages, state.myDependants);
            if (f.isStatic()) {
              myFuture.affectStaticMemberOnDemandUsages(subClass, propagated, state.myAffectedUsages, state.myDependants);
            }
            myFuture.appendDependents(subClass, state.myDependants);
            return true;
          });
        }
        final Collection<Pair<FieldRepr, ClassRepr>> overriddenFields = new HashSet<>();
        myFuture.addOverriddenFields(f, classRepr, overriddenFields, null);
        for (final Pair<FieldRepr, ClassRepr> p : overriddenFields) {
          final FieldRepr ff = p.first;
          final ClassRepr cc = p.second;
          if (ff.isPrivate()) {
            continue;
          }
          // "same kind" = the hidden field is interchangeable with the new one in all uses.
          final boolean sameKind = f.myType.equals(ff.myType) && f.isStatic() == ff.isStatic() && f.isSynthetic() == ff.isSynthetic() && f.isFinal() == ff.isFinal();
          if (!sameKind || Difference.weakerAccess(f.access, ff.access)) {
            final TIntHashSet propagated = myPresent.propagateFieldAccess(ff.name, cc.name);
            final Set<UsageRepr.Usage> affectedUsages = new HashSet<>();
            debug("Affecting usages of overridden field in class ", cc.name);
            myFuture.affectFieldUsages(ff, propagated, ff.createUsage(myContext, cc.name), affectedUsages, state.myDependants);
            if (sameKind) {
              // check if we can reduce the number of usages going to be recompiled
              UsageConstraint constraint = null;
              if (f.isProtected()) {
                // no need to recompile usages in field class' package and hierarchy, since newly added field is accessible in this scope
                constraint = myFuture.new InheritanceConstraint(cc);
              }
              else if (f.isPackageLocal()) {
                // no need to recompile usages in field class' package, since newly added field is accessible in this scope
                constraint = myFuture.new PackageConstraint(cc.getPackageName());
              }
              if (constraint != null) {
                for (final UsageRepr.Usage usage : affectedUsages) {
                  state.myUsageConstraints.put(usage, constraint);
                }
              }
            }
            state.myAffectedUsages.addAll(affectedUsages);
          }
        }
      }
      debug("End of added fields processing");
      return true;
    }
    /**
     * Reacts to fields removed from a changed class. Returns {@code false} if a removed
     * inlinable constant forces a switch to non-incremental mode (and that decision fails);
     * otherwise affects usages of each removed field and returns {@code true}.
     */
    private boolean processRemovedFields(final DiffState state, final ClassRepr.Diff diff, final ClassRepr it) {
      final Collection<FieldRepr> removed = diff.fields().removed();
      if (removed.isEmpty()) {
        return true;
      }
      assert myPresent != null;
      debug("Processing removed fields:");
      for (final FieldRepr f : removed) {
        debug("Field: ", f.name);
        // A removed compile-time constant may already be inlined into dependants' bytecode.
        if (!myProcessConstantsIncrementally && !f.isPrivate() && (f.access & INLINABLE_FIELD_MODIFIERS_MASK) == INLINABLE_FIELD_MODIFIERS_MASK && f.hasValue()) {
          debug("Field had value and was (non-private) final static => a switch to non-incremental mode requested");
          if (!incrementalDecision(it.name, f, myAffectedFiles, myFilesToCompile, myFilter)) {
            debug("End of Differentiate, returning false");
            return false;
          }
        }
        final TIntHashSet propagated = myPresent.propagateFieldAccess(f.name, it.name);
        myPresent.affectFieldUsages(f, propagated, f.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
        if (!f.isPrivate() && f.isStatic()) {
          debug("The field was static --- affecting static field import usages");
          myPresent.affectStaticMemberImportUsages(f.name, it.name, propagated, state.myAffectedUsages, state.myDependants);
        }
      }
      debug("End of removed fields processing");
      return true;
    }
    /**
     * Reacts to changed fields: possibly-inlined constant changes (incremental handling or a
     * request to switch to non-incremental mode), type/signature changes, access-modifier
     * changes (with usage constraints where applicable), and annotation changes reported by
     * extensions. Returns {@code false} only when a non-incremental switch is requested and fails.
     */
    private boolean processChangedFields(final DiffState state, final ClassRepr.Diff diff, final ClassRepr it) {
      final Collection<Pair<FieldRepr, Difference>> changed = diff.fields().changed();
      if (changed.isEmpty()) {
        return true;
      }
      debug("Processing changed fields:");
      assert myFuture != null;
      assert myPresent != null;
      for (final Pair<FieldRepr, Difference> f : changed) {
        final Difference d = f.second;
        final FieldRepr field = f.first;
        debug("Field: ", field.name);
        final Supplier<TIntHashSet> propagated = lazy(()-> myFuture.propagateFieldAccess(field.name, it.name));
        // only if the field was a compile-time constant
        if (!field.isPrivate() && (field.access & INLINABLE_FIELD_MODIFIERS_MASK) == INLINABLE_FIELD_MODIFIERS_MASK && d.hadValue()) {
          final int changedModifiers = d.addedModifiers() | d.removedModifiers();
          final boolean harmful = (changedModifiers & (Opcodes.ACC_STATIC | Opcodes.ACC_FINAL)) != 0;
          final boolean accessChanged = (changedModifiers & (Opcodes.ACC_PUBLIC | Opcodes.ACC_PRIVATE | Opcodes.ACC_PROTECTED)) != 0;
          final boolean becameLessAccessible = accessChanged && d.accessRestricted();
          final boolean valueChanged = (d.base() & Difference.VALUE) != 0;
          if (harmful || valueChanged || becameLessAccessible) {
            if (myProcessConstantsIncrementally) {
              debug("Potentially inlined field changed it's access or value => affecting field usages");
              myFuture.affectFieldUsages(field, propagated.get(), field.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants);
            }
            else {
              debug("Potentially inlined field changed it's access or value => a switch to non-incremental mode requested");
              if (!incrementalDecision(it.name, field, myAffectedFiles, myFilesToCompile, myFilter)) {
                debug("End of Differentiate, returning false");
                return false;
              }
            }
          }
        }
        if (d.base() != Difference.NONE) {
          if ((d.base() & Difference.TYPE) != 0 || (d.base() & Difference.SIGNATURE) != 0) {
            debug("Type or signature changed --- affecting field usages");
            myFuture.affectFieldUsages(
              field, propagated.get(), field.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants
            );
          }
          else if ((d.base() & Difference.ACCESS) != 0) {
            if ((d.addedModifiers() & Opcodes.ACC_STATIC) != 0 ||
                (d.removedModifiers() & Opcodes.ACC_STATIC) != 0 ||
                (d.addedModifiers() & Opcodes.ACC_PRIVATE) != 0 ||
                (d.addedModifiers() & Opcodes.ACC_VOLATILE) != 0) {
              debug("Added/removed static modifier or added private/volatile modifier --- affecting field usages");
              myFuture.affectFieldUsages(
                field, propagated.get(), field.createUsage(myContext, it.name), state.myAffectedUsages, state.myDependants
              );
              if (!field.isPrivate()) {
                if ((d.addedModifiers() & Opcodes.ACC_STATIC) != 0) {
                  debug("Added static modifier --- affecting static member on-demand import usages");
                  myFuture.affectStaticMemberOnDemandUsages(it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
                }
                else if ((d.removedModifiers() & Opcodes.ACC_STATIC) != 0) {
                  debug("Removed static modifier --- affecting static field import usages");
                  myFuture.affectStaticMemberImportUsages(field.name, it.name, propagated.get(), state.myAffectedUsages, state.myDependants);
                }
              }
            }
            else {
              final Set<UsageRepr.Usage> usages = new THashSet<>();
              if ((d.addedModifiers() & Opcodes.ACC_FINAL) != 0) {
                // A newly final field breaks code that assigns to it.
                debug("Added final modifier --- affecting field assign usages");
                myFuture.affectFieldUsages(field, propagated.get(), field.createAssignUsage(myContext, it.name), usages, state.myDependants);
                state.myAffectedUsages.addAll(usages);
              }
              if ((d.removedModifiers() & Opcodes.ACC_PUBLIC) != 0) {
                debug("Removed public modifier, affecting field usages with appropriate constraint");
                myFuture.affectFieldUsages(field, propagated.get(), field.createUsage(myContext, it.name), usages, state.myDependants);
                state.myAffectedUsages.addAll(usages);
                // Narrow recompilation to usages that can no longer see the field.
                for (final UsageRepr.Usage usage : usages) {
                  if ((d.addedModifiers() & Opcodes.ACC_PROTECTED) != 0) {
                    state.myUsageConstraints.put(usage, myFuture.new InheritanceConstraint(it));
                  }
                  else {
                    state.myUsageConstraints.put(usage, myFuture.new PackageConstraint(it.getPackageName()));
                  }
                }
              }
              else if ((d.removedModifiers() & Opcodes.ACC_PROTECTED) != 0 && d.accessRestricted()) {
                debug("Removed protected modifier and the field became less accessible, affecting field usages with package constraint");
                myFuture.affectFieldUsages(field, propagated.get(), field.createUsage(myContext, it.name), usages, state.myDependants);
                state.myAffectedUsages.addAll(usages);
                for (final UsageRepr.Usage usage : usages) {
                  state.myUsageConstraints.put(usage, myFuture.new PackageConstraint(it.getPackageName()));
                }
              }
            }
          }
          if ((d.base() & Difference.ANNOTATIONS) != 0) {
            final Set<AnnotationsChangeTracker.Recompile> toRecompile = EnumSet.noneOf(AnnotationsChangeTracker.Recompile.class);
            for (AnnotationsChangeTracker extension : myAnnotationChangeTracker) {
              if (toRecompile.containsAll(AnnotationsChangeTracker.RECOMPILE_ALL)) {
                break;
              }
              final Set<AnnotationsChangeTracker.Recompile> res = extension.fieldAnnotationsChanged(myContext, field, d.annotations());
              if (res.contains(AnnotationsChangeTracker.Recompile.USAGES)) {
                debug("Extension "+extension.getClass().getName()+" requested recompilation because of changes in annotations list --- affecting field usages");
              }
              if (res.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
                debug("Extension "+extension.getClass().getName()+" requested recompilation because of changes in field annotations list --- affecting subclasses");
              }
              toRecompile.addAll(res);
            }
            if (toRecompile.contains(AnnotationsChangeTracker.Recompile.USAGES)) {
              final Set<UsageRepr.Usage> usages = new THashSet<>();
              myFuture.affectFieldUsages(field, propagated.get(), field.createUsage(myContext, it.name), usages, state.myDependants);
              state.myAffectedUsages.addAll(usages);
              // remove any constraints to ensure all field usages are recompiled
              for (UsageRepr.Usage usage : usages) {
                state.myUsageConstraints.remove(usage);
              }
            }
            if (toRecompile.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
              myFuture.affectSubclasses(it.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, false, myCompiledFiles, null);
            }
          }
        }
      }
      debug("End of changed fields processing");
      return true;
    }
/**
 * Analyzes every class whose representation changed between the previous snapshot and the delta,
 * registering affected usages, usage constraints and subclass invalidations in {@code state}.
 *
 * @param state accumulator for affected usages, dependants and usage constraints of this diff round
 * @return {@code false} if a switch to non-incremental compilation was requested (annotation with
 *         SOURCE retention or removed LOCAL_VARIABLE annotation target could not be handled
 *         incrementally); {@code true} otherwise
 */
private boolean processChangedClasses(final DiffState state) {
  final Collection<Pair<ClassRepr, ClassRepr.Diff>> changedClasses = state.myClassDiff.changed();
  if (!changedClasses.isEmpty()) {
    debug("Processing changed classes:");
    assert myFuture != null;
    assert myPresent != null;
    // constraint limiting affected usages to files accepted by the current filter (if any)
    final Util.FileFilterConstraint fileFilterConstraint = myFilter != null? myPresent.new FileFilterConstraint(myFilter) : null;
    for (final Pair<ClassRepr, ClassRepr.Diff> changed : changedClasses) {
      final ClassRepr changedClass = changed.first;
      final ClassRepr.Diff diff = changed.second;
      myDelta.addChangedClass(changedClass.name);
      debug("Changed: ", changedClass.name);
      final int addedModifiers = diff.addedModifiers();
      final boolean superClassChanged = (diff.base() & Difference.SUPERCLASS) != 0;
      final boolean interfacesChanged = !diff.interfaces().unchanged();
      final boolean signatureChanged = (diff.base() & Difference.SIGNATURE) != 0;
      // keep the ClassToSubclasses edges in the delta up to date with the changed hierarchy
      if (superClassChanged) {
        myDelta.registerRemovedSuperClass(changedClass.name, changedClass.getSuperClass().className);
        final ClassRepr newClass = myDelta.getClassReprByName(null, changedClass.name);
        assert (newClass != null);
        myDelta.registerAddedSuperClass(changedClass.name, newClass.getSuperClass().className);
      }
      if (interfacesChanged) {
        for (final TypeRepr.AbstractType typ : diff.interfaces().removed()) {
          myDelta.registerRemovedSuperClass(changedClass.name, ((TypeRepr.ClassType)typ).className);
        }
        for (final TypeRepr.AbstractType typ : diff.interfaces().added()) {
          myDelta.registerAddedSuperClass(changedClass.name, ((TypeRepr.ClassType)typ).className);
        }
      }
      if (myEasyMode) {
        // in easy mode only hierarchy bookkeeping above is required; no usage analysis
        continue;
      }
      final TIntHashSet directDeps = myPresent.appendDependents(changedClass, state.myDependants);
      if (superClassChanged || interfacesChanged || signatureChanged) {
        debug("Superclass changed: ", superClassChanged);
        debug("Interfaces changed: ", interfacesChanged);
        debug("Signature changed ", signatureChanged);
        // extendsChanged: the superclass was replaced/removed, not merely a new one inserted into the chain
        final boolean extendsChanged = superClassChanged && !diff.extendsAdded();
        final boolean interfacesRemoved = interfacesChanged && !diff.interfaces().removed().isEmpty();
        debug("Extends changed: ", extendsChanged);
        debug("Interfaces removed: ", interfacesRemoved);
        myFuture.affectSubclasses(changedClass.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, extendsChanged || interfacesRemoved || signatureChanged, myCompiledFiles, null);
        if (extendsChanged && directDeps != null) {
          // methods declaring 'throws ChangedClass' in dependants must be rechecked,
          // because exception-hierarchy relationships may no longer hold
          final TypeRepr.ClassType excClass = TypeRepr.createClassType(myContext, changedClass.name);
          directDeps.forEach(depClass -> {
            final ClassRepr depClassRepr = myPresent.classReprByName(depClass);
            if (depClassRepr != null) {
              myPresent.affectMethodUsagesThrowing(depClassRepr, excClass, state.myAffectedUsages, state.myDependants);
            }
            return true;
          });
        }
        if (!changedClass.isAnonymous()) {
          // supers that disappeared from the hierarchy may be used as generic bounds ('T extends Super');
          // affect usages of this class appearing in such bounds
          final TIntHashSet parents = new TIntHashSet();
          myPresent.collectSupersRecursively(changedClass.name, parents);
          final TIntHashSet futureParents = new TIntHashSet();
          myFuture.collectSupersRecursively(changedClass.name, futureParents);
          parents.removeAll(futureParents.toArray());
          parents.remove(myObjectClassName);
          if (!parents.isEmpty()) {
            parents.forEach(className -> {
              debug("Affecting usages in generic type parameter bounds of class: ", className);
              final UsageRepr.Usage usage = UsageRepr.createClassAsGenericBoundUsage(myContext, className);
              state.myAffectedUsages.add(usage);
              if (fileFilterConstraint != null) {
                state.myUsageConstraints.put(usage, fileFilterConstraint);
              }
              myPresent.appendDependents(className, state.myDependants);
              return true;
            });
          }
        }
      }
      if ((diff.addedModifiers() & Opcodes.ACC_INTERFACE) != 0 || (diff.removedModifiers() & Opcodes.ACC_INTERFACE) != 0) {
        debug("Class-to-interface or interface-to-class conversion detected, added class usage to affected usages");
        state.myAffectedUsages.add(changedClass.createUsage());
      }
      if (changedClass.isAnnotation() && changedClass.getRetentionPolicy() == RetentionPolicy.SOURCE) {
        // SOURCE-retention annotations leave no trace in bytecode, so changes cannot be tracked incrementally
        debug("Annotation, retention policy = SOURCE => a switch to non-incremental mode requested");
        if (!incrementalDecision(changedClass.getOuterClassName(), changedClass, myAffectedFiles, myFilesToCompile, myFilter)) {
          debug("End of Differentiate, returning false");
          return false;
        }
      }
      if ((addedModifiers & Opcodes.ACC_PROTECTED) != 0) {
        // narrowed visibility: only usages from outside the inheritance tree become invalid
        debug("Introduction of 'protected' modifier detected, adding class usage + inheritance constraint to affected usages");
        final UsageRepr.Usage usage = changedClass.createUsage();
        state.myAffectedUsages.add(usage);
        state.myUsageConstraints.put(usage, myFuture.new InheritanceConstraint(changedClass));
      }
      if (diff.packageLocalOn()) {
        // narrowed visibility: only usages from other packages become invalid
        debug("Introduction of 'package-private' access detected, adding class usage + package constraint to affected usages");
        final UsageRepr.Usage usage = changedClass.createUsage();
        state.myAffectedUsages.add(usage);
        state.myUsageConstraints.put(usage, myFuture.new PackageConstraint(changedClass.getPackageName()));
      }
      if ((addedModifiers & Opcodes.ACC_FINAL) != 0 || (addedModifiers & Opcodes.ACC_PRIVATE) != 0) {
        debug("Introduction of 'private' or 'final' modifier(s) detected, adding class usage to affected usages");
        state.myAffectedUsages.add(changedClass.createUsage());
      }
      if ((addedModifiers & Opcodes.ACC_ABSTRACT) != 0 || (addedModifiers & Opcodes.ACC_STATIC) != 0) {
        // 'new ChangedClass(...)' expressions become illegal
        debug("Introduction of 'abstract' or 'static' modifier(s) detected, adding class new usage to affected usages");
        state.myAffectedUsages.add(UsageRepr.createClassNewUsage(myContext, changedClass.name));
      }
      if (!changedClass.isAnonymous() && !isEmpty(changedClass.getOuterClassName()) && !changedClass.isPrivate()) {
        if (addedModifiers != 0 || diff.removedModifiers() != 0) {
          debug("Some modifiers (access flags) were changed for non-private inner class, adding class usage to affected usages");
          state.myAffectedUsages.add(changedClass.createUsage());
        }
      }
      if (changedClass.isAnnotation()) {
        debug("Class is annotation, performing annotation-specific analysis");
        if (diff.retentionChanged()) {
          debug("Retention policy change detected, adding class usage to affected usages");
          state.myAffectedUsages.add(changedClass.createUsage());
        }
        else if (diff.targetAttributeCategoryMightChange()) {
          debug("Annotation's attribute category in bytecode might be affected because of TYPE_USE target, adding class usage to affected usages");
          state.myAffectedUsages.add(changedClass.createUsage());
        }
        else {
          final Collection<ElemType> removedtargets = diff.targets().removed();
          if (removedtargets.contains(ElemType.LOCAL_VARIABLE)) {
            // local-variable annotation usages are not tracked in bytecode, so this cannot be handled incrementally
            debug("Removed target contains LOCAL_VARIABLE => a switch to non-incremental mode requested");
            if (!incrementalDecision(changedClass.getOuterClassName(), changedClass, myAffectedFiles, myFilesToCompile, myFilter)) {
              debug("End of Differentiate, returning false");
              return false;
            }
          }
          if (!removedtargets.isEmpty()) {
            // schedule a query matching annotation usages applied at the no-longer-allowed targets
            debug("Removed some annotation targets, adding annotation query");
            state.myAnnotationQuery.add((UsageRepr.AnnotationUsage)UsageRepr.createAnnotationUsage(
              myContext, TypeRepr.createClassType(myContext, changedClass.name), null, EnumSet.copyOf(removedtargets)
            ));
          }
          for (final MethodRepr m : diff.methods().added()) {
            if (!m.hasValue()) {
              // new attribute without a default makes all existing annotation applications incomplete
              debug("Added method with no default value: ", m.name);
              debug("Adding class usage to affected usages");
              state.myAffectedUsages.add(changedClass.createUsage());
            }
          }
        }
        debug("End of annotation-specific analysis");
      }
      // member-level analysis, delegated to the dedicated handlers
      processAddedMethods(state, diff, changedClass);
      processRemovedMethods(state, diff, changedClass);
      processChangedMethods(state, diff, changedClass);
      if (!processAddedFields(state, diff, changedClass)) {
        return false;
      }
      if (!processRemovedFields(state, diff, changedClass)) {
        return false;
      }
      if (!processChangedFields(state, diff, changedClass)) {
        return false;
      }
      if ((diff.base() & Difference.ANNOTATIONS) != 0) {
        // let registered extensions decide how annotation-list changes on the class affect recompilation
        final Set<AnnotationsChangeTracker.Recompile> toRecompile = EnumSet.noneOf(AnnotationsChangeTracker.Recompile.class);
        for (AnnotationsChangeTracker extension : myAnnotationChangeTracker) {
          if (toRecompile.containsAll(AnnotationsChangeTracker.RECOMPILE_ALL)) {
            break;
          }
          final Set<AnnotationsChangeTracker.Recompile> res = extension.classAnnotationsChanged(myContext, changedClass, diff.annotations());
          if (res.contains(AnnotationsChangeTracker.Recompile.USAGES)) {
            debug("Extension "+extension.getClass().getName()+" requested class usages recompilation because of changes in annotations list --- adding class usage to affected usages");
          }
          if (res.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
            debug("Extension "+extension.getClass().getName()+" requested subclasses recompilation because of changes in annotations list --- adding subclasses to affected usages");
          }
          toRecompile.addAll(res);
        }
        final boolean recompileUsages = toRecompile.contains(AnnotationsChangeTracker.Recompile.USAGES);
        if (recompileUsages) {
          state.myAffectedUsages.add(changedClass.createUsage());
        }
        if (toRecompile.contains(AnnotationsChangeTracker.Recompile.SUBCLASSES)) {
          myFuture.affectSubclasses(changedClass.name, myAffectedFiles, state.myAffectedUsages, state.myDependants, recompileUsages, myCompiledFiles, null);
        }
      }
    }
    debug("End of changed classes processing");
  }
  return !myEasyMode;
}
/**
 * Handles classes that disappeared from the given source file: records them as deleted in the
 * delta and (outside easy mode) marks all their usages and dependents as affected.
 *
 * @param state    accumulator for affected usages and dependants of this diff round
 * @param fileName source file the removed classes were previously produced from
 */
private void processRemovedClases(final DiffState state, @NotNull File fileName) {
  final Collection<ClassRepr> removedClasses = state.myClassDiff.removed();
  if (removedClasses.isEmpty()) {
    return;
  }
  assert myPresent != null;
  assert myDelta.myChangedFiles != null;
  myDelta.myChangedFiles.add(fileName);
  debug("Processing removed classes:");
  for (final ClassRepr removedClass : removedClasses) {
    myDelta.addDeletedClass(removedClass, fileName);
    if (myEasyMode) {
      continue;
    }
    myPresent.appendDependents(removedClass, state.myDependants);
    debug("Adding usages of class ", removedClass.name);
    state.myAffectedUsages.add(removedClass.createUsage());
    debug("Affecting usages of removed class ", removedClass.name);
    affectAll(removedClass.name, fileName, myAffectedFiles, myCompiledFiles, myFilter);
  }
  debug("End of removed classes processing.");
}
/**
 * Registers newly added classes in the delta (including their super-class edges) and, outside
 * easy mode, affects dependents of any class sharing the same short name, as well as detecting
 * possible FQ-name duplicates coming from different source files.
 *
 * @param state diff state providing the set of added classes
 */
private void processAddedClasses(DiffState state) {
  final Collection<ClassRepr> addedClasses = state.myClassDiff.added();
  if (addedClasses.isEmpty()) {
    return;
  }
  debug("Processing added classes:");
  if (!myEasyMode && myFilter != null) {
    // checking if this newly added class duplicates already existing one
    assert myCompiledFiles != null;
    assert myAffectedFiles != null;
    for (ClassRepr c : addedClasses) {
      // only top-level named classes can legitimately clash by FQ name across sources
      if (!c.isLocal() && !c.isAnonymous() && isEmpty(c.getOuterClassName())) {
        final Set<File> candidates = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        final Collection<File> currentlyMapped = classToSourceFileGet(c.name);
        if (currentlyMapped != null) {
          candidates.addAll(currentlyMapped);
        }
        // sources already compiled this session or producing the class now are not duplicates
        candidates.removeAll(myCompiledFiles);
        final Collection<File> newSources = myDelta.classToSourceFileGet(c.name);
        if (newSources != null) {
          candidates.removeAll(newSources);
        }
        // stale mappings to deleted files or files outside this target chunk do not count either
        final Set<File> nonExistentOrOutOfScope = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        for (final File candidate : candidates) {
          if (!candidate.exists() || !myFilter.belongsToCurrentTargetChunk(candidate)) {
            nonExistentOrOutOfScope.add(candidate);
          }
        }
        candidates.removeAll(nonExistentOrOutOfScope);
        if (!candidates.isEmpty()) {
          // Possibly duplicate classes from different sets of source files
          // Schedule for recompilation both to make possible 'duplicate sources' error evident
          candidates.clear(); // just reusing the container
          if (currentlyMapped != null) {
            candidates.addAll(currentlyMapped);
          }
          if (newSources != null) {
            candidates.addAll(newSources);
          }
          candidates.removeAll(nonExistentOrOutOfScope);
          if (myDebugS.isDebugEnabled()) {
            final StringBuilder msg = new StringBuilder();
            msg.append("Possibly duplicated classes; Scheduling for recompilation sources: ");
            for (File file : candidates) {
              msg.append(file.getPath()).append("; ");
            }
            debug(msg.toString());
          }
          myAffectedFiles.addAll(candidates);
          return; // do not process this file because it should not be integrated
        }
      }
    }
  }
  for (final ClassRepr c : addedClasses) {
    debug("Class name: ", c.name);
    myDelta.addAddedClass(c);
    for (final int sup : c.getSupers()) {
      myDelta.registerAddedSuperClass(c.name, sup);
    }
    if (!myEasyMode && !c.isAnonymous() && !c.isLocal()) {
      // a new top-level class may shadow same-short-named classes in dependants' import resolution,
      // so dependants of all classes with this short name must be recompiled
      final TIntHashSet toAffect = new TIntHashSet();
      toAffect.add(c.name);
      final TIntHashSet classes = myShortClassNameIndex.get(myContext.get(c.getShortName()));
      if (classes != null) {
        // affecting dependencies on all other classes with the same short name
        toAffect.addAll(classes.toArray());
      }
      toAffect.forEach(qName -> {
        final TIntHashSet depClasses = myClassToClassDependency.get(qName);
        if (depClasses != null) {
          affectCorrespondingSourceFiles(depClasses);
        }
        return true;
      });
    }
  }
  debug("End of added classes processing.");
}
/**
 * Schedules for recompilation every source file that currently produces any of the given classes,
 * subject to the dependent-files filter (when one is configured).
 *
 * @param toAffect internal name ids of classes whose source files should be affected
 */
private void affectCorrespondingSourceFiles(TIntHashSet toAffect) {
  assert myAffectedFiles != null;
  toAffect.forEach(classQName -> {
    final Collection<File> sources = classToSourceFileGet(classQName);
    if (sources == null) {
      return true;
    }
    for (final File source : sources) {
      if (myFilter != null && !myFilter.accept(source)) {
        continue; // filtered out of the current compilation scope
      }
      debug("Adding dependent file ", source);
      myAffectedFiles.add(source);
    }
    return true;
  });
}
/**
 * Walks all dependent classes gathered during the diff and schedules their source files for
 * recompilation whenever one of the file's recorded usages matches an affected usage (and its
 * constraint, if any) or satisfies a pending annotation query.
 *
 * @param state diff state containing dependants, affected usages, constraints and annotation queries
 */
private void calculateAffectedFiles(final DiffState state) {
  debug("Checking dependent classes:");
  assert myAffectedFiles != null;
  assert myCompiledFiles != null;
  state.myDependants.forEach(new TIntProcedure() {
    @Override
    public boolean execute(final int depClass) {
      final Collection<File> depFiles = classToSourceFileGet(depClass);
      if (depFiles != null) {
        for (File depFile : depFiles) {
          processDependentFile(depClass, depFile);
        }
      }
      return true;
    }
    // Decides whether a single source file of a dependent class must be recompiled.
    private void processDependentFile(int depClass, @NotNull File depFile) {
      if (myAffectedFiles.contains(depFile)) {
        return; // already scheduled
      }
      debug("Dependent class: ", depClass);
      final ClassFileRepr repr = getReprByName(depFile, depClass);
      if (repr == null) {
        return;
      }
      if (repr instanceof ClassRepr && !((ClassRepr)repr).hasInlinedConstants() && myCompiledFiles.contains(depFile)) {
        // Classes containing inlined constants from other classes and compiled against older constant values
        // may need to be recompiled several times within a compile session.
        // Otherwise it is safe to skip the file if it has already been compiled in this session.
        return;
      }
      final Set<UsageRepr.Usage> depUsages = repr.getUsages();
      if (depUsages == null || depUsages.isEmpty()) {
        return;
      }
      // a single matching usage is enough — return as soon as the file is scheduled
      for (UsageRepr.Usage usage : depUsages) {
        if (usage instanceof UsageRepr.AnnotationUsage) {
          final UsageRepr.AnnotationUsage annotationUsage = (UsageRepr.AnnotationUsage)usage;
          for (final UsageRepr.AnnotationUsage query : state.myAnnotationQuery) {
            if (query.satisfies(annotationUsage)) {
              debug("Added file due to annotation query");
              myAffectedFiles.add(depFile);
              return;
            }
          }
        }
        else if (state.myAffectedUsages.contains(usage)) {
          final UsageConstraint constraint = state.myUsageConstraints.get(usage);
          if (constraint == null) {
            debug("Added file with no constraints");
            myAffectedFiles.add(depFile);
            return;
          }
          if (constraint.checkResidence(depClass)) {
            debug("Added file with satisfied constraint");
            myAffectedFiles.add(depFile);
            return;
          }
        }
      }
    }
  });
}
/**
 * Main driver of the diff phase: compares the delta against the previous snapshot file by file,
 * delegating to the process* handlers, then finalizes the set of affected files.
 *
 * @return {@code true} if the make can proceed incrementally; {@code false} when a switch to
 *         non-incremental mode was requested (also always {@code false} in easy mode)
 */
boolean differentiate() {
  synchronized (myLock) {
    myDelta.myIsDifferentiated = true;
    if (myDelta.myIsRebuild) {
      // on a rebuild everything is recompiled anyway — nothing to diff
      return true;
    }
    debug("Begin of Differentiate:");
    debug("Easy mode: ", myEasyMode);
    try {
      processDisappearedClasses();
      final List<FileClasses> newClasses = new ArrayList<>();
      myDelta.myRelativeSourceFilePathToClasses.forEachEntry(new TObjectObjectProcedure<String, Collection<ClassFileRepr>>() {
        @Override
        public boolean execute(String relativeFilePath, Collection<ClassFileRepr> content) {
          File file = toFull(relativeFilePath);
          if (myFilesToCompile == null || myFilesToCompile.contains(file)) {
            // Consider only files actually compiled in this round.
            // For other sources the list of classes taken from this map will be possibly incomplete.
            newClasses.add(new FileClasses(file, content));
          }
          return true;
        }
      });
      for (final FileClasses compiledFile : newClasses) {
        final File fileName = compiledFile.myFileName;
        // split the previous snapshot of this file into classes vs. module-info descriptors
        final Set<ClassRepr> pastClasses = new THashSet<>();
        final Set<ModuleRepr> pastModules = new THashSet<>();
        final Collection<ClassFileRepr> past = sourceFileToClassesGet(fileName);
        if (past != null) {
          for (ClassFileRepr repr : past) {
            if (repr instanceof ClassRepr) {
              pastClasses.add((ClassRepr)repr);
            }
            else {
              pastModules.add((ModuleRepr)repr);
            }
          }
        }
        final DiffState state = new DiffState(
          Difference.make(pastClasses, compiledFile.myFileClasses),
          Difference.make(pastModules, compiledFile.myFileModules)
        );
        if (!myEasyMode) {
          processModules(state, fileName);
        }
        if (!processChangedClasses(state)) {
          if (!myEasyMode) {
            // turning non-incremental
            return false;
          }
        }
        processRemovedClases(state, fileName);
        processAddedClasses(state);
        if (!myEasyMode) {
          calculateAffectedFiles(state);
        }
      }
      // Now that the list of added classes is complete,
      // check that super-classes of compiled classes are among newly added ones.
      // Even if compiled class did not change, we should register 'added' superclass
      // Consider situation for class B extends A:
      // 1. file A is removed, make fails with error in file B
      // 2. A is added back, B and A are compiled together in the second make session
      // 3. Even if B did not change, A is considered as newly added and should be registered again in ClassToSubclasses dependencies
      // Without this code such registration will not happen because list of B's parents did not change
      final Set<ClassRepr> addedClasses = myDelta.getAddedClasses();
      if (!addedClasses.isEmpty()) {
        final TIntHashSet addedNames = new TIntHashSet();
        for (ClassRepr repr : addedClasses) {
          addedNames.add(repr.name);
        }
        for (FileClasses compiledFile : newClasses) {
          for (ClassRepr aClass : compiledFile.myFileClasses) {
            for (int parent : aClass.getSupers()) {
              if (addedNames.contains(parent)) {
                myDelta.registerAddedSuperClass(aClass.name, parent);
              }
            }
          }
        }
      }
      debug("End of Differentiate.");
      if (myEasyMode) {
        return false;
      }
      assert myAffectedFiles != null;
      //assert myDelayedWorks != null;
      // removed sources cannot be recompiled — drop them from the affected set
      final Collection<String> removed = myDelta.myRemovedFiles;
      if (removed != null) {
        for (final String r : removed) {
          myAffectedFiles.remove(new File(r));
        }
      }
      return true/*myDelayedWorks.doWork(myAffectedFiles)*/;
    }
    finally {
      if (myFilesToCompile != null) {
        assert myDelta.myChangedFiles != null;
        // if some class is associated with several sources,
        // some of them may not have been compiled in this round, so such files should be considered unchanged
        myDelta.myChangedFiles.retainAll(myFilesToCompile);
      }
    }
  }
}
private void processModules(final DiffState state, File fileName) {
final Difference.Specifier<ModuleRepr, ModuleRepr.Diff> modulesDiff = state.myModulesDiff;
if (modulesDiff.unchanged()) {
return;
}
for (ModuleRepr moduleRepr : modulesDiff.added()) {
myDelta.addChangedClass(moduleRepr.name); // need this for integrate
// after module has been added, the whole target should be rebuilt
// because necessary 'require' directives may be missing from the newly added module-info file
myFuture.affectModule(moduleRepr, myAffectedFiles);
}
for (ModuleRepr removedModule : modulesDiff.removed()) {
myDelta.addDeletedClass(removedModule, fileName); // need this for integrate
myPresent.affectDependentModules(state, removedModule.name, null, true);
}
for (Pair<ModuleRepr, ModuleRepr.Diff> pair : modulesDiff.changed()) {
final ModuleRepr moduleRepr = pair.first;
final ModuleRepr.Diff d = pair.second;
boolean affectSelf = false;
boolean affectDeps = false;
UsageConstraint constraint = null;
myDelta.addChangedClass(moduleRepr.name); // need this for integrate
if (d.versionChanged()) {
final int version = moduleRepr.getVersion();
myPresent.affectDependentModules(state, moduleRepr.name, new UsageConstraint() {
@Override
public boolean checkResidence(int dep) {
final ModuleRepr depModule = myPresent.moduleReprByName(dep);
if (depModule != null) {
for (ModuleRequiresRepr requires : depModule.getRequires()) {
if (requires.name == moduleRepr.name && requires.getVersion() == version) {
return true;
}
}
}
return false;
}
}, false);
}
final Difference.Specifier<ModuleRequiresRepr, ModuleRequiresRepr.Diff> requiresDiff = d.requires();
for (ModuleRequiresRepr removed : requiresDiff.removed()) {
affectSelf = true;
if (removed.isTransitive()) {
affectDeps = true;
constraint = UsageConstraint.ANY;
break;
}
}
for (Pair<ModuleRequiresRepr, ModuleRequiresRepr.Diff> changed : requiresDiff.changed()) {
affectSelf |= changed.second.versionChanged();
if (changed.second.becameNonTransitive()) {
affectDeps = true;
// we could have created more precise constraint here: analyze if required module (recursively)
// has only qualified exports that include given module's name. But this seems to be excessive since
// in most cases module's exports are unqualified, so that any other module can access the exported API.
constraint = UsageConstraint.ANY;
}
}
final Difference.Specifier<ModulePackageRepr, ModulePackageRepr.Diff> exportsDiff = d.exports();
if (!affectDeps) {
for (ModulePackageRepr removedPackage : exportsDiff.removed()) {
affectDeps = true;
if (!removedPackage.isQualified()) {
constraint = UsageConstraint.ANY;
break;
}
for (Integer name : removedPackage.getModuleNames()) {
final UsageConstraint matchName = UsageConstraint.exactMatch(name);
if (constraint == null) {
constraint = matchName;
}
else {
constraint = constraint.or(matchName);
}
}
}
}
if (!affectDeps || constraint != UsageConstraint.ANY) {
for (Pair<ModulePackageRepr, ModulePackageRepr.Diff> p : exportsDiff.changed()) {
final Collection<Integer> removedModuleNames = p.second.targetModules().removed();
affectDeps |= !removedModuleNames.isEmpty();
if (!removedModuleNames.isEmpty()) {
affectDeps = true;
for (Integer name : removedModuleNames) {
final UsageConstraint matchName = UsageConstraint.exactMatch(name);
if (constraint == null) {
constraint = matchName;
}
else {
constraint = constraint.or(matchName);
}
}
}
}
}
if (affectSelf) {
myPresent.affectModule(moduleRepr, myAffectedFiles);
}
if (affectDeps) {
myPresent.affectDependentModules(state, moduleRepr.name, constraint, true);
}
}
}
}
/**
 * Runs the diff phase for a full-rebuild delta; the delta is marked differentiated and the
 * short-circuit rebuild path inside {@code differentiate()} is taken.
 *
 * @param delta mappings accumulated during the rebuild
 */
public void differentiateOnRebuild(final Mappings delta) {
  final Differential differential = new Differential(delta);
  differential.differentiate();
}
/**
 * Runs the diff phase for a non-incremental make: hierarchy bookkeeping only ("easy mode"),
 * no affected-files calculation.
 *
 * @param delta          mappings collected during this make session
 * @param removed        paths of removed source files
 * @param filesToCompile source files compiled in this session
 */
public void differentiateOnNonIncrementalMake(final Mappings delta,
                                              final Collection<String> removed,
                                              final Collection<? extends File> filesToCompile) {
  final Differential differential = new Differential(delta, removed, filesToCompile);
  differential.differentiate();
}
/**
 * Runs the full incremental diff phase, computing the set of files to recompile.
 *
 * @param delta              mappings collected during this compilation round
 * @param removed            paths of removed source files
 * @param filesToCompile     sources compiled in this round
 * @param compiledWithErrors sources that failed compilation
 * @param compiledFiles      all sources compiled so far in this session
 * @param affectedFiles      receiver collection for sources scheduled for recompilation
 * @param filter             scope filter for dependent files
 * @param constantSearch     optional resolver for constant-affection queries;
 *                           NOTE(review): accepted but not forwarded to the Differential
 *                           constructor here — confirm whether it is intentionally unused
 * @return {@code true} if the make can proceed incrementally, {@code false} otherwise
 */
public boolean differentiateOnIncrementalMake
  (final Mappings delta,
   final Collection<String> removed,
   final Collection<? extends File> filesToCompile,
   final Collection<? extends File> compiledWithErrors,
   final Collection<? extends File> compiledFiles,
   final Collection<? super File> affectedFiles,
   @NotNull final DependentFilesFilter filter,
   @Nullable final Callbacks.ConstantAffectionResolver constantSearch) {
  return new Differential(delta, removed, filesToCompile, compiledWithErrors, compiledFiles, affectedFiles, filter).differentiate();
}
/**
 * Records into {@code buffer} the reverse dependency edges (usage owner -> className) that
 * should be removed for the given class. When {@code usages} is null, they are looked up from
 * the class' stored representation.
 *
 * @param className class whose back-dependencies are being cleaned up
 * @param usages    the class' usages, or null to resolve them from the stored repr
 * @param buffer    trash-bin maplet accumulating owner -> className pairs to remove
 */
private void cleanupBackDependency(final int className, @Nullable Set<? extends UsageRepr.Usage> usages, final IntIntMultiMaplet buffer) {
  Set<? extends UsageRepr.Usage> effectiveUsages = usages;
  if (effectiveUsages == null) {
    final ClassFileRepr repr = getReprByName(null, className);
    effectiveUsages = repr == null ? null : repr.getUsages();
  }
  if (effectiveUsages == null) {
    return;
  }
  for (final UsageRepr.Usage usage : effectiveUsages) {
    buffer.put(usage.getOwner(), className);
  }
}
/**
 * Erases all mapping data for a class that was removed together with {@code sourceFile}:
 * hierarchy edges, back-dependencies, class-to-source links and the short-name index entry.
 * Does nothing if the class FQ name is still legitimately produced by another existing source.
 *
 * @param delta                delta in which removed super-class edges are registered
 * @param cr                   representation of the removed class (or module descriptor)
 * @param sourceFile           the source file the class was removed from
 * @param usages               the class' usages, forwarded to back-dependency cleanup
 * @param dependenciesTrashBin accumulator of dependency edges scheduled for removal
 */
private void cleanupRemovedClass(final Mappings delta, @NotNull final ClassFileRepr cr, File sourceFile, final Set<? extends UsageRepr.Usage> usages, final IntIntMultiMaplet dependenciesTrashBin) {
  final int className = cr.name;

  // it is safe to cleanup class information if it is mapped to non-existing files only
  final Collection<File> mappedSources = classToSourceFileGet(className);
  if (mappedSources == null || mappedSources.isEmpty()) {
    return;
  }
  for (final File mapped : mappedSources) {
    if (FileUtil.filesEqual(sourceFile, mapped)) {
      continue;
    }
    // className is also mapped to a different source:
    //  - single mapping to a foreign source => the FQ name exists elsewhere, keep all links
    //  - multiple mappings => keep links while any foreign mapped source still exists
    if (mappedSources.size() == 1 || mapped.exists()) {
      return;
    }
  }

  if (cr instanceof ClassRepr) {
    for (final int superName : ((ClassRepr)cr).getSupers()) {
      delta.registerRemovedSuperClass(className, superName);
    }
  }

  cleanupBackDependency(className, usages, dependenciesTrashBin);

  myClassToClassDependency.remove(className);
  myClassToSubclasses.remove(className);
  myClassToRelativeSourceFilePath.remove(className);

  if (cr instanceof ClassRepr) {
    final ClassRepr classRepr = (ClassRepr)cr;
    if (!classRepr.isLocal() && !classRepr.isAnonymous()) {
      myShortClassNameIndex.removeFrom(myContext.get(classRepr.getShortName()), className);
    }
  }
}
/**
 * Merges a differentiated delta into this (persistent) Mappings instance: removes data for
 * deleted sources/classes, replaces data for changed classes and files, rebuilds the
 * class-to-subclasses and class-to-class dependency maplets, and closes the delta.
 *
 * @param delta the differentiated delta to merge; must have been processed by differentiate()
 */
public void integrate(final Mappings delta) {
  synchronized (myLock) {
    try {
      assert (delta.isDifferentiated());
      final Collection<String> removed = delta.myRemovedFiles;
      delta.runPostPasses();
      // accumulates (usage owner -> class) dependency edges that must be dropped from myClassToClassDependency
      final IntIntMultiMaplet dependenciesTrashBin = new IntIntTransientMultiMaplet();
      if (removed != null) {
        for (final String file : removed) {
          final File deletedFile = new File(file);
          final Set<ClassFileRepr> fileClasses = (Set<ClassFileRepr>)sourceFileToClassesGet(deletedFile);
          if (fileClasses != null) {
            for (final ClassFileRepr aClass : fileClasses) {
              cleanupRemovedClass(delta, aClass, deletedFile, aClass.getUsages(), dependenciesTrashBin);
            }
            myRelativeSourceFilePathToClasses.remove(myRelativizer.toRelative(file));
          }
        }
      }
      if (!delta.isRebuild()) {
        // incremental merge: apply per-class/per-file changes recorded in the delta
        for (final Pair<ClassFileRepr, File> pair : delta.getDeletedClasses()) {
          final ClassFileRepr deletedClass = pair.first;
          cleanupRemovedClass(delta, deletedClass, pair.second, deletedClass.getUsages(), dependenciesTrashBin);
        }
        for (ClassRepr repr : delta.getAddedClasses()) {
          if (!repr.isAnonymous() && !repr.isLocal()) {
            myShortClassNameIndex.put(myContext.get(repr.getShortName()), repr.name);
          }
        }
        // merge subclass-edge additions/removals per super class
        final TIntHashSet superClasses = new TIntHashSet();
        final IntIntTransientMultiMaplet addedSuperClasses = delta.getAddedSuperClasses();
        final IntIntTransientMultiMaplet removedSuperClasses = delta.getRemovedSuperClasses();
        addAllKeys(superClasses, addedSuperClasses);
        addAllKeys(superClasses, removedSuperClasses);
        superClasses.forEach(superClass -> {
          final TIntHashSet added = addedSuperClasses.get(superClass);
          TIntHashSet removed12 = removedSuperClasses.get(superClass);
          final TIntHashSet old = myClassToSubclasses.get(superClass);
          if (old == null) {
            if (added != null && !added.isEmpty()) {
              myClassToSubclasses.replace(superClass, added);
            }
          }
          else {
            boolean changed = false;
            final int[] addedAsArray = added != null && !added.isEmpty()? added.toArray() : null;
            if (removed12 != null && !removed12.isEmpty()) {
              if (addedAsArray != null) {
                // optimization: avoid unnecessary changes in the set
                removed12 = (TIntHashSet)removed12.clone();
                removed12.removeAll(addedAsArray);
              }
              if (!removed12.isEmpty()) {
                changed = old.removeAll(removed12.toArray());
              }
            }
            if (addedAsArray != null) {
              changed |= old.addAll(addedAsArray);
            }
            if (changed) {
              myClassToSubclasses.replace(superClass, old);
            }
          }
          return true;
        });
        // for changed classes, take the class-to-source mapping from the delta and
        // schedule their old back-dependencies for removal
        delta.getChangedClasses().forEach(className -> {
          final Collection<String> sourceFiles = delta.myClassToRelativeSourceFilePath.get(className);
          myClassToRelativeSourceFilePath.replace(className, sourceFiles);
          cleanupBackDependency(className, null, dependenciesTrashBin);
          return true;
        });
        delta.getChangedFiles().forEach(fileName -> {
          String relative = toRelative(fileName);
          final Collection<ClassFileRepr> classes = delta.myRelativeSourceFilePathToClasses.get(relative);
          myRelativeSourceFilePathToClasses.replace(relative, classes);
          return true;
        });
        // some classes may be associated with multiple sources.
        // In case some of these sources was not compiled, but the class was changed, we need to update
        // sourceToClasses mapping for such sources to include the updated ClassRepr version of the changed class
        final THashSet<File> unchangedSources = new THashSet<>(FileUtil.FILE_HASHING_STRATEGY);
        delta.myRelativeSourceFilePathToClasses.forEachEntry(new TObjectObjectProcedure<String, Collection<ClassFileRepr>>() {
          @Override
          public boolean execute(String source, Collection<ClassFileRepr> b) {
            unchangedSources.add(toFull(source));
            return true;
          }
        });
        unchangedSources.removeAll(delta.getChangedFiles());
        if (!unchangedSources.isEmpty()) {
          unchangedSources.forEach(unchangedSource -> {
            final Collection<ClassFileRepr> updatedClasses = delta.sourceFileToClassesGet(unchangedSource);
            if (updatedClasses != null && !updatedClasses.isEmpty()) {
              final List<ClassFileRepr> classesToPut = new ArrayList<>();
              final TIntHashSet updatedClassNames = new TIntHashSet();
              for (ClassFileRepr aClass : updatedClasses) {
                // from all generated classes on this round consider only 'differentiated' ones, for
                // which we can reliably say that the class has changed. Keep classes, for which no such checks were made,
                // to make it possible to create a diff and compare changes on next compilation rounds.
                if (delta.getChangedClasses().contains(aClass.name)) {
                  classesToPut.add(aClass);
                  updatedClassNames.add(aClass.name);
                }
              }
              final Collection<ClassFileRepr> currentClasses = sourceFileToClassesGet(unchangedSource);
              if (currentClasses != null) {
                for (ClassFileRepr aClass : currentClasses) {
                  if (!updatedClassNames.contains(aClass.name)) {
                    classesToPut.add(aClass);
                  }
                }
              }
              myRelativeSourceFilePathToClasses.replace(toRelative(unchangedSource), classesToPut);
            }
            return true;
          });
        }
      }
      else {
        // rebuild merge: the delta fully replaces the current state
        myClassToSubclasses.putAll(delta.myClassToSubclasses);
        myClassToRelativeSourceFilePath.replaceAll(delta.myClassToRelativeSourceFilePath);
        myRelativeSourceFilePathToClasses.replaceAll(delta.myRelativeSourceFilePathToClasses);
        delta.myRelativeSourceFilePathToClasses.forEachEntry(new TObjectObjectProcedure<String, Collection<ClassFileRepr>>() {
          @Override
          public boolean execute(String src, Collection<ClassFileRepr> classes) {
            for (ClassFileRepr repr : classes) {
              if (repr instanceof ClassRepr) {
                final ClassRepr clsRepr = (ClassRepr)repr;
                if (!clsRepr.isAnonymous() && !clsRepr.isLocal()) {
                  myShortClassNameIndex.put(myContext.get(clsRepr.getShortName()), repr.name);
                }
              }
            }
            return true;
          }
        });
      }
      // updating classToClass dependencies
      final TIntHashSet affectedClasses = new TIntHashSet();
      addAllKeys(affectedClasses, dependenciesTrashBin);
      addAllKeys(affectedClasses, delta.myClassToClassDependency);
      affectedClasses.forEach(aClass -> {
        final TIntHashSet now = delta.myClassToClassDependency.get(aClass);
        final TIntHashSet toRemove = dependenciesTrashBin.get(aClass);
        final boolean hasDataToAdd = now != null && !now.isEmpty();
        if (toRemove != null && !toRemove.isEmpty()) {
          final TIntHashSet current = myClassToClassDependency.get(aClass);
          if (current != null && !current.isEmpty()) {
            // apply removals then additions; write back only when the set really changed
            final TIntHashSet before = new TIntHashSet();
            addAll(before, current);
            final boolean removed1 = current.removeAll(toRemove.toArray());
            final boolean added = hasDataToAdd && current.addAll(now.toArray());
            if ((removed1 && !added) || (!removed1 && added) || !before.equals(current)) {
              myClassToClassDependency.replace(aClass, current);
            }
          }
          else {
            if (hasDataToAdd) {
              myClassToClassDependency.put(aClass, now);
            }
          }
        }
        else {
          // nothing to remove for this class
          if (hasDataToAdd) {
            myClassToClassDependency.put(aClass, now);
          }
        }
        return true;
      });
    }
    finally {
      delta.close();
    }
  }
}
  /**
   * Creates a compiler callback that feeds freshly compiled classes into these mappings.
   * Import and constant references are reported separately per class name and are merged
   * into the class representation when {@code associate} is later called for that class.
   */
  public Callbacks.Backend getCallback() {
    return new Callbacks.Backend() {
      // className -> {imports; static_imports}
      private final Map<String, Pair<Collection<String>, Collection<String>>> myImportRefs = Collections.synchronizedMap(new HashMap<>());
      // className -> constants inlined into that class from other classes
      private final Map<String, Collection<Callbacks.ConstantRef>> myConstantRefs = Collections.synchronizedMap(new HashMap<>());
      @Override
      public void associate(String classFileName, Collection<String> sources, ClassReader cr) {
        synchronized (myLock) {
          final int classFileNameS = myContext.get(classFileName);
          final ClassFileRepr result = new ClassfileAnalyzer(myContext).analyze(classFileNameS, cr);
          if (result != null) {
            // since java9 'repr' can represent either a class or a compiled module-info.java
            final int className = result.name;
            if (result instanceof ClassRepr) {
              final ClassRepr classRepr = (ClassRepr)result;
              final String classNameStr = myContext.getValue(className);
              if (addConstantUsages(classRepr, myConstantRefs.remove(classNameStr))) {
                // Important: should register constants before imports, because imports can produce additional
                // field references too and addConstantUsages may return false in this case
                classRepr.setHasInlinedConstants(true);
              }
              final Pair<Collection<String>, Collection<String>> imports = myImportRefs.remove(classNameStr);
              if (imports != null) {
                addImportUsages(classRepr, imports.getFirst(), imports.getSecond());
              }
            }
            // remember which source files this class was compiled from, and vice versa
            for (String sourceFileName : sources) {
              String relative = myRelativizer.toRelative(sourceFileName);
              myClassToRelativeSourceFilePath.put(className, relative);
              myRelativeSourceFilePathToClasses.put(relative, result);
            }
            // record a subclass link for every direct supertype
            if (result instanceof ClassRepr) {
              for (final int s : ((ClassRepr)result).getSupers()) {
                myClassToSubclasses.put(s, className);
              }
            }
            // record dependencies: the owner of each usage now depends on this class
            for (final UsageRepr.Usage u : result.getUsages()) {
              final int owner = u.getOwner();
              if (owner != className) {
                myClassToClassDependency.put(owner, className);
              }
            }
          }
        }
      }
      /** Convenience overload for a class compiled from a single source file. */
      @Override
      public void associate(final String classFileName, final String sourceFileName, final ClassReader cr) {
        associate(classFileName, Collections.singleton(sourceFileName), cr);
      }
      /**
       * Stores (or clears, when both collections are empty) the imports reported for a class.
       * The class name arrives in dotted form and is normalized to the internal '/' form.
       */
      @Override
      public void registerImports(String className, Collection<String> classImports, Collection<String> staticImports) {
        final String key = className.replace('.', '/');
        if (!classImports.isEmpty() || !staticImports.isEmpty()) {
          myImportRefs.put(key, Pair.create(classImports, staticImports));
        }
        else {
          myImportRefs.remove(key);
        }
      }
      /** Stores (or clears, when empty) the inlined-constant references reported for a class. */
      @Override
      public void registerConstantReferences(String className, Collection<Callbacks.ConstantRef> cRefs) {
        final String key = className.replace('.', '/');
        if (!cRefs.isEmpty()) {
          myConstantRefs.put(key, cRefs);
        }
        else {
          myConstantRefs.remove(key);
        }
      }
      /**
       * Translates import declarations into usages on the class representation.
       * Wildcard class imports are skipped; wildcard static imports produce a class usage
       * plus an on-demand usage, single static imports a class usage plus a member usage.
       */
      private void addImportUsages(ClassRepr repr, Collection<String> classImports, Collection<String> staticImports) {
        for (final String anImport : classImports) {
          if (!anImport.endsWith(IMPORT_WILDCARD_SUFFIX)) {
            repr.addUsage(UsageRepr.createClassUsage(myContext, myContext.get(anImport.replace('.', '/'))));
          }
        }
        for (String anImport : staticImports) {
          if (anImport.endsWith(IMPORT_WILDCARD_SUFFIX)) {
            final int iname = myContext.get(anImport.substring(0, anImport.length() - IMPORT_WILDCARD_SUFFIX.length()).replace('.', '/'));
            repr.addUsage(UsageRepr.createClassUsage(myContext, iname));
            repr.addUsage(UsageRepr.createImportStaticOnDemandUsage(myContext, iname));
          }
          else {
            final int i = anImport.lastIndexOf('.');
            if (i > 0 && i < anImport.length() - 1) {
              final int iname = myContext.get(anImport.substring(0, i).replace('.', '/'));
              final int memberName = myContext.get(anImport.substring(i+1));
              repr.addUsage(UsageRepr.createClassUsage(myContext, iname));
              repr.addUsage(UsageRepr.createImportStaticMemberUsage(myContext, memberName, iname));
            }
          }
        }
      }
      /**
       * Adds a field usage for every constant inlined from another class (self-references are
       * skipped). Returns true if at least one new usage was actually added.
       */
      private boolean addConstantUsages(ClassRepr repr, Collection<Callbacks.ConstantRef> cRefs) {
        boolean addedNewUsages = false;
        if (cRefs != null) {
          for (Callbacks.ConstantRef ref : cRefs) {
            final int owner = myContext.get(ref.getOwner().replace('.', '/'));
            if (repr.name != owner) {
              addedNewUsages |= repr.addUsage(UsageRepr.createFieldUsage(myContext, myContext.get(ref.getName()), owner, myContext.get(ref.getDescriptor())));
            }
          }
        }
        return addedNewUsages;
      }
    };
  }
@Nullable
public Set<ClassRepr> getClasses(final String sourceFileName) {
final File f = new File(sourceFileName);
synchronized (myLock) {
final Collection<ClassFileRepr> reprs = sourceFileToClassesGet(f);
if (reprs == null || reprs.isEmpty()) {
return null;
}
final Set<ClassRepr> result = new THashSet<>();
for (ClassFileRepr repr : reprs) {
if (repr instanceof ClassRepr) {
result.add((ClassRepr)repr);
}
}
return result;
}
}
  /** Returns the source files the given class (interned id) is associated with, or {@code null} if unknown. */
  @Nullable
  public Collection<File> getClassSources(int className) {
    synchronized (myLock) {
      return classToSourceFileGet(className);
    }
  }
public void close() {
BuildDataCorruptedException error = null;
synchronized (myLock) {
for (CloseableMaplet maplet : Arrays.asList(myClassToSubclasses, myClassToClassDependency, myRelativeSourceFilePathToClasses, myClassToRelativeSourceFilePath, myShortClassNameIndex)) {
if (maplet != null) {
try {
maplet.close();
}
catch (BuildDataCorruptedException ex) {
if (error == null) {
error = ex;
}
}
}
}
if (!myIsDelta) {
// only close if you own the context
final DependencyContext context = myContext;
if (context != null) {
try {
context.close();
}
catch (BuildDataCorruptedException ex) {
if (error == null) {
error = ex;
}
}
myContext = null;
}
}
}
if (error != null) {
throw error;
}
}
  /**
   * Flushes the underlying maps. For the primary (non-delta) mappings the short-name index
   * and the owned dependency context are flushed too; the context is written through to
   * disk only when {@code memoryCachesOnly} is false.
   */
  public void flush(final boolean memoryCachesOnly) {
    synchronized (myLock) {
      myClassToSubclasses.flush(memoryCachesOnly);
      myClassToClassDependency.flush(memoryCachesOnly);
      myRelativeSourceFilePathToClasses.flush(memoryCachesOnly);
      myClassToRelativeSourceFilePath.flush(memoryCachesOnly);
      if (!myIsDelta) {
        myShortClassNameIndex.flush(memoryCachesOnly);
        // flush if you own the context
        final DependencyContext context = myContext;
        if (context != null) {
          context.clearMemoryCaches();
          if (!memoryCachesOnly) {
            context.flush();
          }
        }
      }
    }
  }
private static boolean addAll(final TIntHashSet whereToAdd, TIntHashSet whatToAdd) {
if (whatToAdd.isEmpty()) {
return false;
}
final Ref<Boolean> changed = new Ref<>(Boolean.FALSE);
whatToAdd.forEach(value -> {
if (whereToAdd.add(value)) {
changed.set(Boolean.TRUE);
}
return true;
});
return changed.get();
}
private static void addAllKeys(final TIntHashSet whereToAdd, final IntIntMultiMaplet maplet) {
maplet.forEachEntry(new TIntObjectProcedure<TIntHashSet>() {
@Override
public boolean execute(int key, TIntHashSet b) {
whereToAdd.add(key);
return true;
}
});
}
  // Records (in this delta) that 'superClass' gained 'aClass' as a direct inheritor.
  private void registerAddedSuperClass(final int aClass, final int superClass) {
    assert (myAddedSuperClasses != null);
    myAddedSuperClasses.put(superClass, aClass);
  }
  // Records (in this delta) that 'aClass' no longer lists 'superClass' among its supertypes.
  private void registerRemovedSuperClass(final int aClass, final int superClass) {
    assert (myRemovedSuperClasses != null);
    myRemovedSuperClasses.put(superClass, aClass);
  }
  // NOTE(review): presumably indicates that differentiation ran/is running for this delta - confirm at call sites.
  private boolean isDifferentiated() {
    return myIsDifferentiated;
  }
  // True when these mappings are being populated as part of a full rebuild.
  private boolean isRebuild() {
    return myIsRebuild;
  }
  // Registers a class (paired with the file it came from) as deleted and marks it as changed.
  private void addDeletedClass(final ClassFileRepr cr, File fileName) {
    assert (myDeletedClasses != null);
    myDeletedClasses.add(Pair.create(cr, fileName));
    addChangedClass(cr.name);
  }
  // Registers a newly added class and marks it as changed.
  private void addAddedClass(final ClassRepr cr) {
    assert (myAddedClasses != null);
    myAddedClasses.add(cr);
    addChangedClass(cr.name);
  }
  // Marks the class id as changed and also marks all source files it maps to as changed.
  private void addChangedClass(final int it) {
    assert (myChangedClasses != null && myChangedFiles != null);
    myChangedClasses.add(it);
    final Collection<File> files = classToSourceFileGet(it);
    if (files != null) {
      myChangedFiles.addAll(files);
    }
  }
  /** Deleted classes paired with their originating files; empty when none were collected. */
  @NotNull
  private Set<Pair<ClassFileRepr, File>> getDeletedClasses() {
    return myDeletedClasses == null ? Collections.emptySet() : Collections.unmodifiableSet(myDeletedClasses);
  }
  /** Added classes collected so far; empty when none were collected. */
  @NotNull
  private Set<ClassRepr> getAddedClasses() {
    return myAddedClasses == null ? Collections.emptySet() : Collections.unmodifiableSet(myAddedClasses);
  }
  // Ids of classes marked as changed; may be null when change tracking is inactive (see asserts above).
  private TIntHashSet getChangedClasses() {
    return myChangedClasses;
  }
  // Source files marked as changed; may be null when change tracking is inactive (see asserts above).
  private THashSet<File> getChangedFiles() {
    return myChangedFiles;
  }
  // Logging helper: plain message through the class logger.
  private static void debug(final String s) {
    LOG.debug(s);
  }
  // Logging helper: commented int value through the debug stream.
  private void debug(final String comment, final int s) {
    myDebugS.debug(comment, s);
  }
  // Logging helper: logs the file's path under the given comment.
  private void debug(final String comment, final File f) {
    debug(comment, f.getPath());
  }
  // Logging helper: commented string value through the debug stream.
  private void debug(final String comment, final String s) {
    myDebugS.debug(comment, s);
  }
  // Logging helper: commented boolean value through the debug stream.
  private void debug(final String comment, final boolean s) {
    myDebugS.debug(comment, s);
  }
public void toStream(final PrintStream stream) {
final Streamable[] data = {
myClassToSubclasses,
myClassToClassDependency,
myRelativeSourceFilePathToClasses,
myClassToRelativeSourceFilePath,
};
final String[] info = {
"ClassToSubclasses",
"ClassToClassDependency",
"SourceFileToClasses",
"ClassToSourceFile",
"SourceFileToAnnotationUsages",
"SourceFileToUsages"
};
for (int i = 0; i < data.length; i++) {
stream.print("Begin Of ");
stream.println(info[i]);
data[i].toStream(myContext, stream);
stream.print("End Of ");
stream.println(info[i]);
}
}
private static <T> Supplier<T> lazy(Supplier<T> calculation) {
return new Supplier<T>() {
Ref<T> calculated;
@Override
public T get() {
return (calculated != null? calculated : (calculated = new Ref<>(calculation.get()))).get();
}
};
}
public void toStream(File outputRoot) {
final Streamable[] data = {
myClassToSubclasses,
myClassToClassDependency,
myRelativeSourceFilePathToClasses,
myClassToRelativeSourceFilePath,
myShortClassNameIndex
};
final String[] info = {
"ClassToSubclasses",
"ClassToClassDependency",
"SourceFileToClasses",
"ClassToSourceFile",
"ShortClassNameIndex"
};
for (int i = 0; i < data.length; i++) {
final File file = new File(outputRoot, info[i]);
FileUtil.createIfDoesntExist(file);
try (PrintStream stream = new PrintStream(file)) {
data[i].toStream(myContext, stream);
}
catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
} | apache-2.0 |
parkito/RooMMateGeneralGUI | src/main/java/ru/siksmfp/spring/annotation/security/config/SecurityWebApplicationInitializer.java | 395 | package ru.siksmfp.spring.annotation.security.config;
import org.springframework.security.web.context.AbstractSecurityWebApplicationInitializer;
import org.springframework.stereotype.Component;
/**
* @author Artem Karnov @date 4/17/2018.
* @email artem.karnov@t-systems.com
*/
@Component
// Intentionally empty: all behavior is inherited from AbstractSecurityWebApplicationInitializer,
// which (per Spring Security's documentation) registers the springSecurityFilterChain with the
// servlet container at startup. Subclassing it is the documented way to enable that registration.
public class SecurityWebApplicationInitializer extends AbstractSecurityWebApplicationInitializer {
}
| apache-2.0 |
XClouded/t4f-core | java/concurrent/src/main/java/io/aos/concurrent/scale/LoopHandler.java | 2551 | /****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package io.aos.concurrent.scale;
/**
 * Splits the half-open index range [start, end) into contiguous chunks and processes
 * the chunks in parallel on a fixed number of worker threads. Subclasses override
 * {@link #loopDoRange(int, int)} to do the per-chunk work.
 */
public class LoopHandler implements Runnable {

    /** Half-open [start, end) slice of loop indices handed to one worker at a time. */
    protected class LoopRange {
        public int start, end;
    }

    protected Thread lookupThreads[];
    protected int startLoop, endLoop, curLoop, numThreads;

    /**
     * @param start   first loop index (inclusive)
     * @param end     last loop index (exclusive)
     * @param threads number of worker threads to use
     */
    public LoopHandler(int start, int end, int threads) {
        startLoop = curLoop = start;
        endLoop = end;
        numThreads = threads;
        lookupThreads = new Thread[numThreads];
    }

    /**
     * Hands out the next chunk of indices, or null when the range is exhausted.
     * Synchronized because all workers pull from the shared {@code curLoop} cursor.
     */
    protected synchronized LoopRange loopGetRange() {
        if (curLoop >= endLoop)
            return null;
        LoopRange ret = new LoopRange();
        ret.start = curLoop;
        // chunk size is range/threads rounded up by one, so the range is covered
        // in at most numThreads chunks
        curLoop += (endLoop - startLoop) / numThreads + 1;
        ret.end = (curLoop < endLoop) ? curLoop : endLoop;
        return ret;
    }

    /** Override to perform the actual work for indices in [start, end). Default: no-op. */
    public void loopDoRange(int start, int end) {
    }

    /** Starts the worker threads and blocks until all of them have finished. */
    public void loopProcess() {
        for (int i = 0; i < numThreads; i++) {
            lookupThreads[i] = new Thread(this);
            lookupThreads[i].start();
        }
        for (int i = 0; i < numThreads; i++) {
            try {
                lookupThreads[i].join();
                lookupThreads[i] = null;
            } catch (InterruptedException iex) {
                // Fix: the original swallowed the interrupt; restore the flag so
                // callers can observe and react to the interruption.
                Thread.currentThread().interrupt();
            }
        }
    }

    /** Worker loop: repeatedly grab a chunk and process it until none remain. */
    public void run() {
        LoopRange range;
        while ((range = loopGetRange()) != null) {
            loopDoRange(range.start, range.end);
        }
    }
}
| apache-2.0 |
azusa/hatunatu | hatunatu/src/test/java/jp/fieldnotes/hatunatu/dao/impl/bean/Employee9.java | 1472 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package jp.fieldnotes.hatunatu.dao.impl.bean;
import jp.fieldnotes.hatunatu.dao.annotation.tiger.Bean;
import jp.fieldnotes.hatunatu.dao.annotation.tiger.Column;
import java.io.Serializable;
@Bean(table = "EMP")
public class Employee9 implements Serializable {

    private static final long serialVersionUID = 1L;

    // primary key (EMPNO column)
    private long empno;

    // employee name; bound to a differently-cased column via @Column on the getter
    private String ename;

    public long getEmpno() {
        return empno;
    }

    public void setEmpno(long empno) {
        this.empno = empno;
    }

    @Column("eNaMe")
    public String getEname() {
        return ename;
    }

    public void setEname(String ename) {
        this.ename = ename;
    }

    /** Renders as "&lt;empno&gt;, &lt;ename&gt;". */
    public String toString() {
        return empno + ", " + ename;
    }
}
| apache-2.0 |
Talend/components | components/components-jdbc/components-jdbc-runtime/src/main/java/org/talend/components/jdbc/avro/FloatToStringConverter.java | 1288 | //============================================================================
//
// Copyright (C) 2006-2022 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//============================================================================
package org.talend.components.jdbc.avro;
import org.apache.avro.Schema;
import org.talend.daikon.avro.AvroUtils;
import org.talend.daikon.avro.converter.AvroConverter;
/**
* Converts datum Float to avro String
*/
public class FloatToStringConverter implements AvroConverter<Float, String> {

    /** Avro schema for the string side of the conversion. */
    @Override
    public Schema getSchema() {
        return AvroUtils._string();
    }

    @Override
    public Class<Float> getDatumClass() {
        return Float.class;
    }

    /** Parses the avro string into a boxed Float (throws NumberFormatException on bad input). */
    @Override
    public Float convertToDatum(String value) {
        return Float.valueOf(value);
    }

    /** Renders the datum Float in its canonical decimal string form. */
    @Override
    public String convertToAvro(Float value) {
        return value.toString();
    }
}
| apache-2.0 |
uiguard/uiguard | UIGuard/src/com/uiguard/entity/proxy/UGProxy.java | 3581 | package com.uiguard.entity.proxy;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import net.sf.cglib.proxy.Enhancer;
import net.sf.cglib.proxy.MethodInterceptor;
import net.sf.cglib.proxy.MethodProxy;
import com.google.common.base.Joiner;
import com.uiguard.entity.driver.IUGDriver;
import com.uiguard.exception.UiGuardException;
import com.uiguard.logger.IUiGuardLogger;
/**
 * Base class for CGLIB-based proxies: intercepts every call on the proxied object,
 * runs the subclass-provided pre-invoke hooks, then delegates to the real target.
 */
public abstract class UGProxy implements MethodInterceptor {

    protected IUGDriver driver;

    // The real object behind the proxy; all intercepted calls are delegated to it.
    private Object obj;

    public Object getObj(){
        return obj;
    }

    /**
     * Creates a CGLIB proxy over {@code obj}'s class. When constructor arguments are
     * supplied the matching constructor is used, otherwise the default constructor.
     */
    public Object createProxy(IUGDriver driver, Object obj, Class<?>[] argumentTypes, Object ... arguments) {
        this.driver = driver;
        this.obj = obj;
        Enhancer enhancer = createEnhence();
        if (arguments.length == 0) {
            return enhancer.create();
        }
        return enhancer.create(argumentTypes, arguments);
    }

    private Enhancer createEnhence() {
        Enhancer enhancer = new Enhancer();
        enhancer.setSuperclass(obj.getClass());
        enhancer.setCallback(this);
        return enhancer;
    }

    /** Hook invoked before every intercepted method. */
    protected abstract void actionBeforeAllMethodInvoke(Method method, Object[] args) throws UiGuardException;

    /** Hook invoked (additionally) before public intercepted methods. */
    protected abstract void actionBeforePublicMethodInvoke(Method method, Object[] args) throws UiGuardException;

    @Override
    public Object intercept(Object obj, Method method, Object[] args,
            MethodProxy proxy) throws Throwable {
        actionBeforeAllMethodInvoke(method, args);
        // Fix: the original tested "Modifier.PUBLIC == method.getModifiers() % 2",
        // a fragile bit trick; Modifier.isPublic expresses the same check clearly.
        // Also collapses the duplicated invokeMthod call in both branches.
        if (Modifier.isPublic(method.getModifiers())) {
            actionBeforePublicMethodInvoke(method, args);
        }
        return invokeMthod(method, args);
    }

    /**
     * Invokes the method on the real target; any failure is routed through the driver's
     * failure handler and null is returned. (Name kept as-is: subclasses may override it.)
     */
    protected Object invokeMthod(Method method, Object[] args) throws UiGuardException{
        try {
            return invokeMethodWithoutCatchException(method, args);
        } catch (Exception e) {
            driver.helper().handleFailure("[Method invoke error]"+getMessage(method, args), e);
            return null;
        }
    }

    /** Reflectively invokes the method on the target, wrapping reflection errors. */
    protected Object invokeMethodWithoutCatchException(Method method,
            Object[] args) throws UiGuardException {
        method.setAccessible(true);
        try {
            return method.invoke(obj, args);
        } catch (IllegalArgumentException e) {
            throw new UiGuardException("IllegalArgumentException", e);
        } catch (IllegalAccessException e) {
            throw new UiGuardException("IllegalAccessException", e);
        } catch (InvocationTargetException e) {
            throw new UiGuardException("InvocationTargetException", e);
        }
    }

    /** Writes a trace entry for the call when tracing is enabled for this proxy. */
    protected void addTraceLog(Method method, Object[] args) {
        if(canAddTraceLog(method))
            getLogger(driver).trace(getTraceMessage(method,args));
    }

    private String getTraceMessage(Method method, Object[] args){
        return getObj().getClass().getSimpleName()+ "->" +
                method.getName() +"("+getParameterStringFromArgs(args)+")";
    }

    /** Formats "[TargetClass][method][arg1,arg2,...]" for diagnostics. */
    protected String getMessage(Method method, Object[] args){
        return "["+obj.getClass().getSimpleName()+"]"+
                "["+method.getName()+"]"+
                "["+getParameterStringFromArgs(args)+"]";
    }

    private String getParameterStringFromArgs(Object[] args){
        return Joiner.on(",").join(args);
    }

    protected static final IUiGuardLogger getLogger(IUGDriver driver){
        return driver.helper().getUiGuardLogger();
    }

    protected boolean canAddInfoAndTraceLog(Method method) {
        return canAddInfoLog(method) && canAddTraceLog(method);
    }

    protected abstract boolean canAddInfoLog(Method method);

    protected abstract boolean canAddTraceLog(Method method);
}
| apache-2.0 |
gouravshenoy/airavata | modules/workflow/workflow-core/src/main/java/org/apache/airavata/workflow/core/WorkflowFactory.java | 2215 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.workflow.core;
import org.apache.airavata.common.exception.ApplicationSettingsException;
import org.apache.airavata.common.utils.ServerSettings;
import org.apache.airavata.workflow.core.parser.JsonWorkflowParser;
import org.apache.airavata.workflow.core.parser.WorkflowParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
/**
* All classes implement this WorkflowFactory interface, should be abstract or singleton.
*/
public class WorkflowFactory {

    private static final Logger log = LoggerFactory.getLogger(WorkflowFactory.class);

    /**
     * Resolves the parser for the given workflow definition. A custom parser class
     * (taking the workflow string as its single constructor argument) may be configured
     * via server settings; when no such setting exists the default JSON parser is used.
     * Failures while instantiating a configured parser propagate to the caller.
     */
    public static WorkflowParser getWorkflowParser(String workflowString) throws Exception {
        WorkflowParser parser = null;
        try {
            String parserClassName = ServerSettings.getWorkflowParser();
            Constructor<?> constructor = Class.forName(parserClassName).getConstructor(String.class);
            parser = (WorkflowParser) constructor.newInstance(workflowString);
        } catch (ApplicationSettingsException e) {
            log.info("A custom workflow parser is not defined, Use default Airavata JSON workflow parser");
        }
        return parser == null ? new JsonWorkflowParser(workflowString) : parser;
    }
}
| apache-2.0 |
codehaus/izpack | izpack-gui/src/main/java/com/izforge/izpack/gui/TwoColumnLayout.java | 37184 | /*
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/ http://izpack.codehaus.org/
*
* Copyright 2002 Elmar Grom
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.izforge.izpack.gui;
import java.awt.*;
import java.util.ArrayList;
/**
* This class implements a layout manager that generally lays out components in two columns. <BR>
* <BR>
* The design goal for this layout manager was to lay out forms for data entry, where there are
* several rows of entry fields with associated labels. The goal was to have the beginning off all
* labels line up, as well as the left edge of all the data entry fields. This leads to a situation
* where all components are essentially laid out in two columns. The columns adjust to accommodate
* components of various sizes. This means that components that are added are laid out top to
* bottom, either in the left column, in the right column or straddling both columns. In addition to
* this general behavior, the following additional layout capabilities are supported:<br>
* <ul>
* <li>Resizable margins are provided on the left and right side.
* <li>A special region is provided at the top that is only affected by the side margins but not by
* any other layout behavior.
* <li>It is possible to specify the vertical positioning of the cluster of laid out components for
* the case that they do not occupy the entire available real estate.
* <li>Individual components can be indented.
* </ul>
* <p/>
* <b>The Layout Behavior</b> <br>
* <br>
* The left and right margin are absolute. This means that they can not be penetrated by components.
* All layout happens between the limits established by these margins. The purpose of these margins
* is to ensure that components are not laid out all the way to the edge of their container, without
* the need to set matching borders for each component. <br>
* <br>
* The title margin at the top factors only into the layout behavior if there is a component set to
* be laid out in that region, otherwise it is ignored. <br>
* <br>
* The vertical space available to each row of components depends on the space requirements of the
* tallest component in that row. Both components are placed vertically centered in their row. <br>
* <br>
* All horizontal layout is based on the position of three vertical rules, the left rule, the right
* rule and the center rule. <br>
* <br>
* <img src="doc-files/TwoColumnLayout.gif"/> <br>
* <br>
* The actual position of each rule depends on the alignment strategy, margin settings and component
* sizes. Regardless of these factors, components placed in the left column are <i>always</i>
* positioned with their left edge aligned with the left rule. Components placed in the right column
* are <i>always</i> positioned with their left edge aligned with the center rule. If a component
* straddles both columns, it is <i>always</i> positioned with the left edge aligned with the left
* rule, but is allowed to extend all the way to the right rule. The only exception is a component
* that is specified with an indent. In this case the component is moved to the right of the
* respective rule by the indent amount. <br>
* <br>
* The location of the rules is determined based on the alignment strategy as follows:<br>
* <ul>
* <li>The right rule is always located at the edge of the right margin.
* <li><b>Left Alignment:</b> The left rule is located the edge of the left margin. The center rule
* is located far enough to the right to clear the widest component in the left column.
* <li><b>Center Alignment:</b> The center rule is located at the center of the panel. The left rule
* is located far enough to the left to make the widest component in the left column fit.
* <li><b>Right Alignment</b> The center rule is located far enough to the left of the right rule to
* make the widest component in the right column fit. The left rule is located far enough to the
* left to make the widest component in the left column fit.
* </ul>
* All components clump together vertically and are positioned right beneath the title margin. This
* is of course not a very appealing presentation. By setting how the remaining vertical space is
* distributed above and below the cluster of components the cluster can be positioned more
* favorably (see the shaded area in the illustration).
*
* @author Elmar Grom
* @version 0.0.1 / 11/14/02
* @see com.izforge.izpack.gui.TwoColumnConstraints
*/
public class TwoColumnLayout implements LayoutManager2
{
public static final int LEFT = 0;
public static final int RIGHT = 1;
public static final int CENTER = 2;
/**
* holds all the components and layout constraints.
*/
@SuppressWarnings("unchecked")
private java.util.List<TwoColumnConstraints>[] components = new ArrayList[]{new ArrayList<TwoColumnConstraints>(),
new ArrayList<TwoColumnConstraints>()};
/**
* holds the component to be placed in the title region, including layout constraints.
*/
private TwoColumnConstraints title = null;
/**
* the margin setting in % of the container's width
*/
private int margin = 0;
/**
* the setting for the buffer area on top of the component cluster in % of the left over height.
* if rigid is set to true, the setting for this will be the setting for the buffer area on top in terms of pixels
*/
private int topBuffer = 0;
/**
* the setting for the buffer area on top of the component to be used in pixels if true
* this setting is false by default
*/
private boolean rigid = false;
/**
* the indent setting in % of the conteiner's width
*/
private int indent = 0;
/**
* the gap between the two columns
*/
private int gap = 5;
private int colWidth;
private int alignment = LEFT;
private int leftRule;
private int rightRule;
private int centerRule;
private int titleHeight;
/**
* Constructs a <code>TwoColumnLayout</code> layout manager. To add components use the
* container's <code>add(comp, constraints)</code> method with a TwoColumnConstraints object.
*
* @param margin the margin width to use on the left and right side in % of the total container
* width. Values less than 0% and greater than 50% are not accepted.
* @param gap the gap between the two columns.
* @param indent the indent to use for components that have that constraint set. This is a value
* in pixels.
* @param topBuffer the percentage of left over vertical space to place on top of the component
* cluster. Values between 0% and 100% are accepted.
* @param colWidth with of the left column in percent of the whole panel. Set to 0 for automatic
* width.
* @param alignment how to align the overall layout. Legal values are LEFT, CENTER, RIGHT.
*/
public TwoColumnLayout(int margin, int gap, int indent, int topBuffer, boolean rigid, int colWidth, int alignment)
{
this.indent = indent;
this.gap = gap;
this.colWidth = colWidth;
this.rigid = rigid;
if ((margin >= 0) && (margin <= 50))
{
this.margin = margin;
}
if ((topBuffer >= 0) && (topBuffer <= 100) || rigid)
{
this.topBuffer = topBuffer;
}
if ((alignment == LEFT) || (alignment == CENTER) || (alignment == RIGHT))
{
this.alignment = alignment;
}
}
/**
* Constructs a <code>TwoColumnLayout</code> layout manager. To add components use the
* container's <code>add(comp, constraints)</code> method with a TwoColumnConstraints object.
*
* @param margin the margin width to use on the left and right side in % of the total container
* width. Values less than 0% and greater than 50% are not accepted.
* @param gap the gap between the two columns.
* @param indent the indent to use for components that have that constraint set. This is a value
* in pixels.
* @param topBuffer the percentage of left over vertical space to place on top of the component
* cluster. Values between 0% and 100% are accepted.
* @param alignment how to align the overall layout. Legal values are LEFT, CENTER, RIGHT.
*/
public TwoColumnLayout(int margin, int gap, int indent, int topBuffer, boolean rigid, int alignment)
{
this(margin, gap, indent, topBuffer, rigid, 0, alignment);
}
/**
* Sets the constraints for the specified component in this layout. <code>null</code> is a legal
* value for a component, but not for a constraints object.
*
* @param comp the component to be modified.
* @param constraints the constraints to be applied.
*/
public void addLayoutComponent(Component comp, Object constraints)
{
if (constraints == null)
{
return;
}
TwoColumnConstraints component = null;
try
{
component = (TwoColumnConstraints) constraints;
component = (TwoColumnConstraints) component.clone();
}
catch (Throwable exception)
{
return;
}
component.component = comp;
// ----------------------------------------------------
// the title component is recorded in a separate
// variable, displacing any component that might have
// been previously recorded for that location.
// ----------------------------------------------------
if (component.position == TwoColumnConstraints.NORTH)
{
title = component;
if (title.stretch)
{
title.align = LEFT;
}
}
// ----------------------------------------------------
// components that straddle both columns are a bit
// tricky because these components are recorded in the
// left column and the same row cannot contain a
// component in the right column.
//
// If there are fewer components in the left column
// than in the right one, a null is inserted at this
// place in the right column. This allows the component
// to use both columns. The component that previously
// occupied this position and any that were placed
// below will be pushed down by one row due to this
// action.
//
// If there are the same number of components in both
// columns or if there are fewer in the right column
// then the component is added to the left column and
// then the right column filled with null until both
// contain the same number of components. this means
// that any components that will now be placed in the
// right column are positioned beneath this component.
// Unoccupied spots higher in the right column become
// inaccessible.
// ----------------------------------------------------
else if (component.position == TwoColumnConstraints.BOTH)
{
// first make sure that both columns have the same number of entries
while (components[RIGHT].size() > components[LEFT].size())
{
components[LEFT].add(null);
}
while (components[LEFT].size() > components[RIGHT].size())
{
components[RIGHT].add(null);
}
components[LEFT].add(component);
components[RIGHT].add(null);
}
// ----------------------------------------------------
// WEST components are added to the left column
// ----------------------------------------------------
else if (component.position == TwoColumnConstraints.WEST)
{
components[LEFT].add(component);
}
// ----------------------------------------------------
// WESTONLY components are added to the left column
// the right column has to be kept free
// ----------------------------------------------------
else if (component.position == TwoColumnConstraints.WESTONLY)
{
components[LEFT].add(component);
// fill right column to make sure nothing is placed there
while (components[RIGHT].size() < components[LEFT].size())
{
components[RIGHT].add(null);
}
}
// ----------------------------------------------------
// EAST components are added to the right column
// ----------------------------------------------------
else if (component.position == TwoColumnConstraints.EAST)
{
components[RIGHT].add(component);
}
// ----------------------------------------------------
// EASTONLY components are added to the left column
// the right column has to be kept free
// ----------------------------------------------------
else if (component.position == TwoColumnConstraints.EASTONLY)
{
components[RIGHT].add(component);
// fill left column to make sure nothing is placed there
while (components[LEFT].size() < components[RIGHT].size())
{
components[LEFT].add(null);
}
}
// ----------------------------------------------------
// If the position did not match any of the above
// criteria then the component is not added and
// consequently will not be laid out.
// ----------------------------------------------------
}
/**
 * Lays out the container in the specified panel.
 *
 * @param parent the component which needs to be laid out.
 */
public void layoutContainer(Container parent)
{
    // Order matters: the vertical rules (column boundaries) must be computed
    // first, then the title (which sets titleHeight), before the row
    // components can be placed relative to both.
    positionRules(parent);
    positionTitle(parent);
    positionComponents(parent);
}
/**
 * Positions the three rules in preparation for layout. Sets the variables:<br>
 * <ul>
 * <li><code>leftRule</code>
 * <li><code>rightRule</code>
 * <li><code>centerRule</code>
 * </ul>
 *
 * @param parent the component which needs to be laid out.
 */
private void positionRules(Container parent)
{
    int margin = margin(parent);
    if (alignment == LEFT)
    {
        leftRule = margin;
        rightRule = parent.getWidth() - margin;
        if (colWidth > 0)
        {
            // colWidth is a percentage of the usable width.
            // NOTE(review): "(rightRule - leftRule) / 100 * colWidth" divides
            // first, so up to 99 pixels of precision are lost to integer
            // truncation -- confirm whether "* colWidth / 100" was intended.
            centerRule = leftRule + (rightRule - leftRule) / 100 * colWidth + gap;
        }
        else
        {
            // No explicit column width: size the left column to its contents.
            centerRule = leftRule + minimumColumnWidth(LEFT, parent) + gap;
        }
    }
    else if (alignment == CENTER)
    {
        centerRule = (int) (parent.getMinimumSize().getWidth() / 2);
        leftRule = centerRule - minimumColumnWidth(LEFT, parent) - gap;
        rightRule = parent.getWidth() - margin;
    }
    else if (alignment == RIGHT)
    {
        rightRule = parent.getWidth() - margin;
        // NOTE(review): leftRule is derived from centerRule here, but
        // centerRule is only (re)assigned below -- this reads the value left
        // over from a previous pass (or 0 after invalidateLayout). Confirm
        // whether these two statements should be reordered.
        leftRule = centerRule - minimumColumnWidth(LEFT, parent) - gap;
        if (colWidth > 0)
        {
            centerRule = rightRule - (rightRule - leftRule) / 100 * colWidth;
        }
        else
        {
            centerRule = rightRule - minimumColumnWidth(RIGHT, parent);
        }
    }
}
/**
 * Positions the title component and sets the variable <code>titleHeight</code>. <b>Note:</b>
 * this method depends on the fact that the rules are set to their correct layout position.
 *
 * @param parent the component which needs to be laid out.
 */
private void positionTitle(Container parent)
{
    if (title != null)
    {
        Component component = title.component;
        // Fix: the original measured 'component' before the null check below,
        // which would throw a NullPointerException for title constraints
        // without a component. Measurement now happens inside the check; a
        // component-less title is simply skipped (titleHeight stays as-is).
        if (component != null)
        {
            int width = (int) component.getMinimumSize().getWidth();
            titleHeight = (int) component.getMinimumSize().getHeight();
            if (title.stretch)
            {
                // Stretch: the title fills the full span between the rules.
                width = rightRule - leftRule;
                component.setBounds(leftRule, 0, width, titleHeight);
            }
            else if (title.align == TwoColumnConstraints.LEFT)
            {
                component.setBounds(leftRule, 0, width, titleHeight);
            }
            else if (title.align == TwoColumnConstraints.CENTER)
            {
                int left = centerRule - (width / 2);
                component.setBounds(left, 0, width, titleHeight);
            }
            else if (title.align == TwoColumnConstraints.RIGHT)
            {
                int left = rightRule - width;
                component.setBounds(left, 0, width, titleHeight);
            }
        }
    }
}
/**
 * Positions all components in the container, row by row, below the title.
 *
 * @param parent the component which needs to be laid out.
 */
private void positionComponents(Container parent)
{
    int usedHeight = titleHeight + minimumClusterHeight();
    // Negative buffers (cluster taller than the parent) are clamped to zero.
    int topBuffer = Math.max(0, topBuffer(usedHeight, parent));
    int y = titleHeight + topBuffer;
    for (int row = 0; row < rows(); row++)
    {
        int leftHeight = height(row, LEFT);
        int rightHeight = height(row, RIGHT);
        // The taller cell defines the row height; the shorter cell is
        // vertically centered within it (its offset is half the difference,
        // and the taller cell's offset works out to zero).
        int rowHeight = Math.max(leftHeight, rightHeight);
        positionComponent(y + (rowHeight - leftHeight) / 2, row, LEFT, parent);
        positionComponent(y + (rowHeight - rightHeight) / 2, row, RIGHT, parent);
        y += rowHeight;
    }
}
/**
 * Positions one component as instructed. Constraints for each component, such as
 * <code>stretch</code>, <code>BOTH</code> and <code>indent</code> are taken into account. In
 * addition, empty components are handled properly.
 *
 * @param y the y location within the container, where the component should be positioned.
 * @param row the row of the component
 * @param column the column of the component
 * @param parent the container which needs to be laid out.
 */
private void positionComponent(int y, int row, int column, Container parent)
{
    TwoColumnConstraints constraints = null;
    try
    {
        constraints = components[column].get(row);
    }
    // An index out of range simply means there is no cell at (row, column);
    // there is nothing to position.
    catch (Throwable exception)
    {
        return;
    }
    int x = 0;
    if (constraints != null)
    {
        Component component = constraints.component;
        // Fix: the original queried the preferred size before its null check
        // at the bottom of the method, which would throw a
        // NullPointerException for constraints without a component.
        if (component == null)
        {
            return;
        }
        int width = (int) component.getPreferredSize().getWidth();
        int height = (int) component.getPreferredSize().getHeight();
        // --------------------------------------------------
        // set x to the appropriate rule. The only need to
        // modify this is for indent
        // --------------------------------------------------
        boolean stretchBool = constraints.stretch;
        boolean indentBool = constraints.indent;
        int align = constraints.align;
        if (constraints.position == TwoColumnConstraints.BOTH)
        {
            // A spanning component is bounded by the outer rules; if it is
            // still wider than the span, treat it as stretched so that
            // getPosition() pins it to the left rule.
            width = getWidth(leftRule, rightRule, stretchBool, indentBool, width);
            if (width > (rightRule - leftRule))
            {
                stretchBool = true;
            }
            x = getPosition(leftRule, rightRule, stretchBool, indentBool, width, align);
        }
        else if (column == LEFT)
        {
            width = getWidth(leftRule, centerRule, stretchBool, indentBool, width);
            x = getPosition(leftRule, centerRule, stretchBool, indentBool, width, align);
        }
        else
        {
            width = getWidth(centerRule, rightRule, stretchBool, indentBool, width);
            x = getPosition(centerRule, rightRule, stretchBool, indentBool, width, align);
        }
        component.setBounds(x, y, width, height);
    }
}
/**
 * Computes the width a component should occupy between two rules.
 *
 * @param left           the rule bounding the component on the left
 * @param right          the rule bounding the component on the right
 * @param stretch        true if the component should fill the whole span
 * @param indent         true if the indent amount must be subtracted
 * @param componentWidth the component's own width
 * @return the width to use for the component
 */
private int getWidth(int left, int right, boolean stretch, boolean indent, int componentWidth)
{
    // A stretched component spans the full distance between the rules,
    // regardless of its own width.
    int width = stretch ? (right - left) : componentWidth;
    // An indented component gives up the indent amount from its width.
    if (indent)
    {
        width -= this.indent;
    }
    return width;
}
/**
 * Computes the x position for a component between two rules, honoring the
 * alignment and indent constraints.
 *
 * @param left           the rule bounding the component on the left
 * @param right          the rule bounding the component on the right
 * @param stretch        accepted for symmetry with getWidth() but not used here
 * @param indent         true if the indent amount must be added to the position
 * @param componentWidth the width the component will be given
 * @param align          one of the TwoColumnConstraints alignment constants
 * @return the x coordinate for the component
 */
private int getPosition(int left, int right, boolean stretch, boolean indent, int componentWidth, int align)
{
    int position;
    if (align == TwoColumnConstraints.CENTER)
    {
        position = left + (right - left) / 2 - componentWidth / 2 - this.gap / 2;
    }
    else if (align == TwoColumnConstraints.RIGHT)
    {
        position = right - componentWidth - this.gap;
    }
    else
    {
        // LEFT alignment -- and any unrecognized value -- sits on the left rule.
        position = left;
    }
    // An indented component is pushed right by the indent amount.
    if (indent)
    {
        position += this.indent;
    }
    return position;
}
/**
 * Returns the minimum width of the requested column.
 *
 * @param column the column to measure (<code>LEFT</code> / <code>RIGHT</code>)
 * @param parent the component which needs to be laid out.
 * @return the minimum width required to fit the components in this column
 */
private int minimumColumnWidth(int column, Container parent)
{
    int width = 0;
    for (TwoColumnConstraints constraints : components[column])
    {
        // Components that straddle both columns are measured separately in
        // minimumBothColumnsWidth() and therefore skipped here.
        if ((constraints != null) && (constraints.position != TwoColumnConstraints.BOTH))
        {
            int candidate = (int) constraints.component.getMinimumSize().getWidth();
            if (constraints.indent)
            {
                candidate += indent;
            }
            width = Math.max(width, candidate);
        }
    }
    return width;
}
/**
 * Returns the minimum width both columns together should have, based on the
 * minimum widths of all the components that straddle both columns and the
 * minimum width of the title component.
 *
 * @param parent the component which needs to be laid out.
 * @return the minimum width required to fit those components
 */
private int minimumBothColumnsWidth(Container parent)
{
    int width = 0;
    // The title always spans both columns, so it participates in this measure.
    if (title != null)
    {
        width = (int) title.component.getMinimumSize().getWidth();
    }
    // Spanning components are recorded in the LEFT list (with a null
    // placeholder on the right), so only the left list needs scanning.
    for (TwoColumnConstraints constraints : components[LEFT])
    {
        if ((constraints != null) && (constraints.position == TwoColumnConstraints.BOTH))
        {
            int candidate = (int) constraints.component.getMinimumSize().getWidth();
            if (constraints.indent)
            {
                candidate += indent;
            }
            width = Math.max(width, candidate);
        }
    }
    return width;
}
/**
 * Sums the heights of all rows: the minimum height needed for the component
 * cluster, excluding the title.
 */
private int minimumClusterHeight()
{
    int total = 0;
    for (int row = 0; row < rows(); row++)
    {
        total += rowHeight(row);
    }
    return total;
}
/**
 * Returns the number of rows that need to be laid out: the length of the
 * longer of the two column lists.
 */
private int rows()
{
    return Math.max(components[LEFT].size(), components[RIGHT].size());
}
/**
 * Measures and returns the minimum height required to render the components in
 * the indicated row: the taller of the row's two cells.
 *
 * @param row the index of the row to measure
 */
private int rowHeight(int row)
{
    // Math.max evaluates its arguments left to right, preserving the
    // original LEFT-then-RIGHT measurement order.
    return Math.max(height(row, LEFT), height(row, RIGHT));
}
/**
 * Measures and returns the minimum height required to render the component in the indicated row
 * and column.
 *
 * @param row the index of the row to measure
 * @param column the column of the component to measure (<code>LEFT</code> or <code>RIGHT</code>
 * )
 */
private int height(int row, int column)
{
    int height = 0;
    int width = 0;
    Component component;
    TwoColumnConstraints constraints;
    try
    {
        constraints = components[column].get(row);
        if (constraints != null)
        {
            component = constraints.component;
            width = (int) component.getMinimumSize().getWidth();
            height = (int) component.getMinimumSize().getHeight();
            // If the component is wider than its column span, cap its width
            // via setBounds() before measuring height again below --
            // presumably so wrapping components (e.g. multi-line labels)
            // report a taller minimum height at the constrained width.
            // TODO(review): confirm this setBounds side effect is intended
            // during a measurement pass.
            if (constraints.position == TwoColumnConstraints.WEST)
            {
                if (width > (centerRule - leftRule))
                {
                    component.setBounds(0, 0, (centerRule - leftRule), height);
                }
            }
            else if (constraints.position == TwoColumnConstraints.EAST)
            {
                if (width > (rightRule - centerRule))
                {
                    component.setBounds(0, 0, (rightRule - centerRule), height);
                }
            }
            else if (constraints.position == TwoColumnConstraints.BOTH)
            {
                if (width > (rightRule - leftRule))
                {
                    component.setBounds(0, 0, (rightRule - leftRule), height);
                }
            }
            // Re-measure after the potential width cap above.
            height = (int) component.getMinimumSize().getHeight();
        }
    }
    // ----------------------------------------------------
    // we might get an exception if one of the vectors is
    // shorter, because we index out of bounds. If there
    // is nothing there then the height is 0, nothing
    // further to worry about!
    // ----------------------------------------------------
    catch (Throwable exception)
    {
    }
    return (height);
}
/**
 * Computes the margin in pixels from the container width and the margin
 * (percentage) setting.
 *
 * @param parent the component which needs to be laid out.
 */
private int margin(Container parent)
{
    double parentWidth = parent.getSize().getWidth();
    return (int) ((parentWidth * margin) / 100);
}
/**
 * Computes the top buffer value. In rigid mode the <code>topBuffer</code>
 * field is a fixed number of pixels and is returned as-is; otherwise it is
 * interpreted as a percentage of the parent height not already in use.
 *
 * @param usedHeight the amount of the parent component's height that is already in use (height
 *            of the title and the combined height of all rows).
 * @param parent the component which needs to be laid out.
 */
private int topBuffer(int usedHeight, Container parent)
{
    if (rigid)
    {
        return topBuffer;
    }
    int free = ((int) parent.getSize().getHeight()) - usedHeight;
    return free * topBuffer / 100;
}
/*--------------------------------------------------------------------------*/
/**
* Computes the indent value based on the container width and the indent setting.
*
* @param parent the component which needs to be laid out.
*/
/*--------------------------------------------------------------------------*/
/*
* private int indent (Container parent) { int amount = (int)(((parent.getMinimumSize
* ().getWidth ()) * indent) / 100);
*
* return (amount); }
*/
/**
 * Calculates the preferred size dimensions for the specified panel given the
 * components in the specified parent container. For this layout manager the
 * preferred size is identical to the minimum size.
 *
 * @param parent the component to be laid out
 */
public Dimension preferredLayoutSize(Container parent)
{
    return minimumLayoutSize(parent);
}
/**
 * Calculates the minimum size dimensions for the specified panel given the components in the
 * specified parent container.
 *
 * @param parent the component to be laid out
 */
public Dimension minimumLayoutSize(Container parent)
{
    // Side effect: positionTitle() sets titleHeight, which is read below.
    positionTitle(parent);
    int width = minimumBothColumnsWidth(parent);
    int height = minimumClusterHeight() + titleHeight;
    if (rigid)
    {
        // In rigid mode topBuffer is a fixed pixel amount that must be
        // reserved above the cluster.
        height = height+ topBuffer;
    }
    return (new Dimension(width, height));
}
/**
 * Calculates the maximum size dimensions for the specified panel given the
 * components in the specified parent container. For this layout manager the
 * maximum size is identical to the minimum size.
 *
 * @param parent the component to be laid out
 */
public Dimension maximumLayoutSize(Container parent)
{
    return minimumLayoutSize(parent);
}
/**
 * Returns the alignment along the x axis: always 0, i.e. aligned at the
 * origin. The value is a number between 0 and 1 where 0 represents alignment
 * along the origin, 1 is aligned the furthest away from the origin and 0.5 is
 * centered.
 *
 * @param parent the component to be laid out
 */
public float getLayoutAlignmentX(Container parent)
{
    return 0.0f;
}
/**
 * Returns the alignment along the y axis: always 0, i.e. aligned at the
 * origin. The value is a number between 0 and 1 where 0 represents alignment
 * along the origin, 1 is aligned the furthest away from the origin and 0.5 is
 * centered.
 *
 * @param parent the component to be laid out
 */
public float getLayoutAlignmentY(Container parent)
{
    return 0.0f;
}
/**
 * Invalidates the layout, indicating that if the layout manager has cached information it
 * should be discarded.
 *
 * @param parent the component to be laid out
 */
public void invalidateLayout(Container parent)
{
    // Reset all cached rule positions and the title height; they are
    // recomputed on the next layout pass.
    leftRule = 0;
    rightRule = 0;
    centerRule = 0;
    titleHeight = 0;
}
/**
 * Adds the specified component with the specified name to the layout. This version is not
 * supported; use <code>addLayoutComponent</code> with layout constraints instead.
 *
 * @param name the component name
 * @param comp the component to be added
 */
public void addLayoutComponent(String name, Component comp)
{
    // Intentionally a no-op: components must be added with
    // TwoColumnConstraints so the layout knows which column they belong to.
}
/**
 * This functionality removes the TwoColumnConstraints from Vectors so that alignment of
 * components on UserInputPanel doesn't get dirty
 *
 * @param comp the component to be removed
 */
public void removeLayoutComponent(Component comp)
{
    java.util.List<TwoColumnConstraints> left = components[LEFT];
    java.util.List<TwoColumnConstraints> right = components[RIGHT];
    // Scan the left column for the component; if it spans or blocks the right
    // column (BOTH / WESTONLY), drop the right column's entry at the same
    // index. NOTE(review): this relies on the two lists being index-aligned
    // (addLayoutComponent pads with nulls), and the component's own left-side
    // entry is apparently left in place -- confirm both are intended.
    for (int i = 0; i < left.size(); i++)
    {
        TwoColumnConstraints constraints = left.get(i);
        if (constraints == null)
        {
            continue;
        }
        Component ctemp = constraints.component;
        if (ctemp != null && ctemp.equals(comp))
        {
            if (constraints.position == TwoColumnConstraints.BOTH
                    || constraints.position == TwoColumnConstraints.WESTONLY)
            {
                right.remove(i);
            }
            break;
        }
    }
    // Mirror pass: component found in the right column drops the left
    // column's entry at the same index for BOTH / EASTONLY placements.
    for (int j = 0; j < right.size(); j++)
    {
        TwoColumnConstraints constraints = right.get(j);
        if (constraints == null)
        {
            continue;
        }
        Component ctemp = constraints.component;
        if (ctemp != null && ctemp.equals(comp))
        {
            if (constraints.position == TwoColumnConstraints.BOTH
                    || constraints.position == TwoColumnConstraints.EASTONLY)
            {
                left.remove(j);
            }
            break;
        }
    }
}
/**
 * This method is provided for convenience of debugging layout problems. It renders the three
 * rules and the limit of the title margin visible after these positions have been computed. In
 * addition, the indent locations are shown as dashed lines. To use this functionality do the
 * following:<br>
 * <br>
 * <ul>
 * <li>in the container using this layout manager override the <code>paint()</code> method.
 * <li>in that method, first call <code>super.paint()</code>
 * <li>then call this method
 * </ul>
 * <br>
 * <b>Note:</b> cast the graphics object received in the <code>paint()</code> method to
 * <code>Graphics2D</code> when making the call.<br>
 * <br>
 *
 * @param graphics the graphics context used for drawing.
 * @param color the color to use for rendering the layout grid
 */
public void showRules(Graphics2D graphics, Color color)
{
    int height = graphics.getClipBounds().height;
    // Remember the caller's drawing state so it can be restored when done.
    Stroke currentStroke = graphics.getStroke();
    Color currentColor = graphics.getColor();
    // Dashed stroke used for the indent guide lines drawn further below.
    Stroke stroke = new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_BEVEL, 1.5f, new float[]{10, 5}, 5);
    graphics.setColor(color);
    // Solid lines: the three vertical rules plus the horizontal line marking
    // the bottom of the title area.
    graphics.drawLine(leftRule, 0, leftRule, height);
    graphics.drawLine(centerRule, titleHeight, centerRule, height);
    graphics.drawLine(rightRule, 0, rightRule, height);
    graphics.drawLine(leftRule, titleHeight, rightRule, titleHeight);
    // Dashed lines: the indent positions relative to each column's left rule.
    graphics.setStroke(stroke);
    graphics.drawLine((leftRule + indent), titleHeight, (leftRule + indent), height);
    graphics.drawLine((centerRule + indent), titleHeight, (centerRule + indent), height);
    // Restore the caller's stroke and color.
    graphics.setStroke(currentStroke);
    graphics.setColor(currentColor);
}
}
/*---------------------------------------------------------------------------*/
| apache-2.0 |
gijsleussink/ceylon | compiler-java/test/src/com/redhat/ceylon/compiler/java/test/ConcurrentTestsNoRuntime.java | 5604 | /*
* Copyright Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the authors tag. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*
* This particular file is subject to the "Classpath" exception as provided in the
* LICENSE file that accompanied this code.
*
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License,
* along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package com.redhat.ceylon.compiler.java.test;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;
import com.redhat.ceylon.ant.AntToolTests;
import com.redhat.ceylon.ceylondoc.test.CeylonDocToolTests;
import com.redhat.ceylon.compiler.java.codegen.NamingTests;
import com.redhat.ceylon.compiler.java.test.annotations.AnnotationsTests;
import com.redhat.ceylon.compiler.java.test.bc.BcTests;
import com.redhat.ceylon.compiler.java.test.cargeneration.CarGenerationTests;
import com.redhat.ceylon.compiler.java.test.cmr.CMRTests;
import com.redhat.ceylon.compiler.java.test.compat.CompatTests;
import com.redhat.ceylon.compiler.java.test.cmr.CMRHTTPTests;
import com.redhat.ceylon.compiler.java.test.expression.ExpressionTests;
import com.redhat.ceylon.compiler.java.test.expression.ExpressionTests2;
import com.redhat.ceylon.compiler.java.test.expression.comprehensions.ComprehensionTests;
import com.redhat.ceylon.compiler.java.test.expression.ref.StaticRefTests;
import com.redhat.ceylon.compiler.java.test.fordebug.SourcePositionsTests;
import com.redhat.ceylon.compiler.java.test.fordebug.TraceTests;
import com.redhat.ceylon.compiler.java.test.interop.InteropTests;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_0000_0499;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_0500_0999;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_1000_1499;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_1500_1999;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_2000_2499;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_5500_5999;
import com.redhat.ceylon.compiler.java.test.issues.IssuesTests_6000_6499;
import com.redhat.ceylon.compiler.java.test.issues.PackageIssuesTests;
import com.redhat.ceylon.compiler.java.test.misc.MiscTests;
import com.redhat.ceylon.compiler.java.test.model.ModelLoaderTests;
import com.redhat.ceylon.compiler.java.test.model.TypeParserTests;
import com.redhat.ceylon.compiler.java.test.model.ValueTypeTests;
import com.redhat.ceylon.compiler.java.test.nativecode.NativeTests;
import com.redhat.ceylon.compiler.java.test.quoting.QuotingTests;
import com.redhat.ceylon.compiler.java.test.recovery.RecoveryTests;
import com.redhat.ceylon.compiler.java.test.reporting.ReportingTests;
import com.redhat.ceylon.compiler.java.test.statement.OptimizationTests;
import com.redhat.ceylon.compiler.java.test.statement.StatementTests;
import com.redhat.ceylon.compiler.java.test.statement.TryCatchTests;
import com.redhat.ceylon.compiler.java.test.structure.SerializableTests;
import com.redhat.ceylon.compiler.java.test.structure.StructureTests;
import com.redhat.ceylon.compiler.java.test.structure.StructureTests2;
import com.redhat.ceylon.compiler.java.test.structure.StructureTests3;
import com.redhat.ceylon.launcher.test.BootstrapTests;
import com.redhat.ceylon.tools.test.CompilerToolsTests;
/**
 * Concurrent test suite that deliberately excludes the runtime tests.
 * <p>
 * DO NOT USE in Eclipse for development as it doesn't run the runtime tests.
 * This is only useful for ant.
 */
@RunWith(ConcurrentSuite.class)
@SuiteClasses({
        // those take the longest time to run, so we start them first
        ExpressionTests.class,
        ExpressionTests2.class,
        StructureTests.class,
        StructureTests2.class,
        StructureTests3.class,
        SerializableTests.class,
        CMRHTTPTests.class,
        IssuesTests_0000_0499.class,
        IssuesTests_0500_0999.class,
        IssuesTests_1000_1499.class,
        IssuesTests_1500_1999.class,
        IssuesTests_2000_2499.class,
        IssuesTests_5500_5999.class,
        IssuesTests_6000_6499.class,
        MiscTests.class,
        CeylonDocToolTests.class,
        CompilerToolsTests.class,
        // those can run in any order
        NamingTests.class,
        AnnotationsTests.class,
        InteropTests.class,
        ModelLoaderTests.class,
        ValueTypeTests.class,
        PackageIssuesTests.class,
        RecoveryTests.class,
        StatementTests.class,
        OptimizationTests.class,
        TryCatchTests.class,
        TypeParserTests.class,
        QuotingTests.class,
        CMRTests.class,
        // RuntimeSuite.class,
        // MetamodelTests.class,
        // LanguageSatisfactionSuite.class,
        // LanguageSuite.class,
        BcTests.class,
        CompatTests.class,
        ComprehensionTests.class,
        StaticRefTests.class,
        AntToolTests.class,
        TraceTests.class,
        CarGenerationTests.class,
        ReportingTests.class,
        SourcePositionsTests.class,
        NativeTests.class,
        BootstrapTests.class,
        // FIXME Disabled: it does not run currently in parallel
        // TopLevelToolTests.class,
        // Unable to run due to OOMs
        // IntegrationTests.class,
})
// Marker class only: all behavior comes from the annotations above.
public class ConcurrentTestsNoRuntime {
}
| apache-2.0 |
consulo/consulo | modules/base/lang-impl/src/main/java/com/intellij/codeInsight/actions/ShowReformatFileDialog.java | 2785 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.actions;
import com.intellij.lang.LanguageFormatting;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import org.jetbrains.annotations.NonNls;
/**
 * Action that shows the "Reformat File" dialog for the file open in the
 * current editor and, if confirmed, reformats it with the chosen options.
 */
public class ShowReformatFileDialog extends AnAction implements DumbAware {
    // Help topic id opened from the reformat dialog's Help button.
    private static final @NonNls String HELP_ID = "editing.codeReformatting";

    @Override
    public void update(AnActionEvent event) {
        Presentation presentation = event.getPresentation();
        DataContext dataContext = event.getDataContext();
        Project project = dataContext.getData(CommonDataKeys.PROJECT);
        Editor editor = dataContext.getData(CommonDataKeys.EDITOR);
        // No project or editor in context: the action cannot apply.
        if (project == null || editor == null) {
            presentation.setEnabled(false);
            return;
        }
        PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
        // No PSI file (or an in-memory document with no backing file):
        // nothing to reformat.
        if (file == null || file.getVirtualFile() == null) {
            presentation.setEnabled(false);
            return;
        }
        // Enable only when a formatter is registered for this file's language.
        // NOTE(review): there is no setEnabled(false) branch when no formatter
        // exists -- presumably the platform resets the presentation before
        // each update; confirm an explicit else is not needed.
        if (LanguageFormatting.INSTANCE.forContext(file) != null) {
            presentation.setEnabled(true);
        }
    }

    @Override
    public void actionPerformed(AnActionEvent event) {
        Presentation presentation = event.getPresentation();
        DataContext dataContext = event.getDataContext();
        Project project = dataContext.getData(CommonDataKeys.PROJECT);
        Editor editor = dataContext.getData(CommonDataKeys.EDITOR);
        // Same guards as update(); NOTE(review): disabling the presentation
        // here has no visible effect for an already-invoked action and looks
        // copied from update() -- confirm.
        if (project == null || editor == null) {
            presentation.setEnabled(false);
            return;
        }
        PsiFile file = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
        if (file == null || file.getVirtualFile() == null) {
            presentation.setEnabled(false);
            return;
        }
        // Offer "selection only" mode when the editor has a selection.
        boolean hasSelection = editor.getSelectionModel().hasSelection();
        LayoutCodeDialog dialog = new LayoutCodeDialog(project, file, hasSelection, HELP_ID);
        dialog.show();
        if (dialog.isOK()) {
            new FileInEditorProcessor(file, editor, dialog.getRunOptions()).processCode();
        }
    }
}
| apache-2.0 |
hoolrory/VideoInfoViewer | app/src/main/java/com/roryhool/videoinfoviewer/CreditsFragment.java | 3332 | /**
Copyright (c) 2016 Rory Hool
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**/
package com.roryhool.videoinfoviewer;
import java.io.IOException;
import java.io.InputStream;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.text.method.LinkMovementMethod;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
/**
 * Fragment that shows the app's credits: the Apache 2.0 license text read
 * from assets, plus one linkified TextView per third-party library.
 */
public class CreditsFragment extends Fragment {

   /** Resource ids of all credit TextViews whose embedded links should be clickable. */
   private static final int[] LINKED_CREDIT_IDS = {
         R.id.icon_credit,
         R.id.mp4parser_credit,
         R.id.gson_credit,
         R.id.reactivex_credit,
         R.id.reactivexandroid_credit,
         R.id.otto_credit,
         R.id.retrolambda_credit,
         R.id.gradle_retrolambda_credit
   };

   @Override
   public View onCreateView( LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState ) {
      View view = inflater.inflate( R.layout.fragment_credits, container, false );

      setHasOptionsMenu( true );

      try {
         TextView textView = (TextView) view.findViewById( R.id.apache_license );
         textView.setText( readAsset( view, "apache_license_2.txt" ) );
      } catch ( IOException e ) {
         // Best effort: if the license asset cannot be read, the credits
         // screen still renders, just without the license text.
         e.printStackTrace();
      }

      // The repeated findViewById/setMovementMethod pairs are replaced with a
      // single loop over the credit views.
      for ( int id : LINKED_CREDIT_IDS ) {
         TextView creditText = (TextView) view.findViewById( id );
         creditText.setMovementMethod( LinkMovementMethod.getInstance() );
      }

      return view;
   }

   /**
    * Reads the named asset fully and returns its contents as a String.
    *
    * @param view a view whose Context provides access to the assets
    * @param name the asset file name
    * @return the asset's contents
    * @throws IOException if the asset cannot be opened or read
    */
   private static String readAsset( View view, String name ) throws IOException {
      InputStream is = view.getContext().getAssets().open( name );
      try {
         int size = is.available();
         byte[] buffer = new byte[size];
         // Fix: InputStream.read( byte[] ) may return fewer bytes than
         // requested; loop until the whole asset has been consumed.
         int offset = 0;
         while ( offset < size ) {
            int read = is.read( buffer, offset, size - offset );
            if ( read < 0 ) {
               break;
            }
            offset += read;
         }
         return new String( buffer, 0, offset );
      } finally {
         // Fix: the original leaked the stream when read() threw.
         is.close();
      }
   }

   @Override
   public void onCreateOptionsMenu( Menu menu, MenuInflater inflater ) {
      super.onCreateOptionsMenu( menu, inflater );
      // This fragment contributes no menu items; clear anything the host added.
      menu.clear();
   }
}
| apache-2.0 |
tozny/sdk-java | examples/secretmessage/src/main/java/com/tozny/sdk/example/secretmessage/SessionResource.java | 3980 | /*
* SessionResource.java
*
* Copyright (C) 2016, Tozny, LLC.
* All Rights Reserved.
*
* Released under the Apache license. See the file "LICENSE"
* for more information.
*/
package com.tozny.sdk.example.secretmessage;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.tozny.sdk.RealmApi;
import com.tozny.sdk.internal.ProtocolHelpers;
import com.tozny.sdk.realm.RealmConfig;
import com.tozny.sdk.realm.config.ToznyRealmKeyId;
import com.tozny.sdk.realm.config.ToznyRealmSecret;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import javax.ws.rs.DELETE;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
/**
* Receive signed data from the Tozny Javascript and log the user in if
* the authentication is successful.
*
* The Javascript frontend is configured to POST signed data to this URL
* when the user authenticates via their mobile device. If the signature
* is valid for our realm, we save the user's information to the session
* and allow them to access protected resources.
*/
@Path("/session")
public class SessionResource {

    // Jackson ObjectMapper is thread-safe once configured; share one instance
    // instead of allocating a new mapper on every request.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final RealmApi realmApi;
    private final String contextPath;

    public SessionResource(String contextPath, RealmApi realmApi) {
        this.realmApi = realmApi;
        this.contextPath = contextPath;
    }

    /*
     * POSTing a valid Tozny assertion creates an authenticated session.
     * This is how a user logs in.
     */
    @POST
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    public Response createSession(
            @FormParam("signed_data") String signedData,
            @FormParam("signature") String signature,
            @Context HttpServletRequest req
    ) {
        // Both form fields are required to even attempt verification.
        if (signature == null || signedData == null) {
            // Simplification: use the standard JAX-RS status constant instead
            // of the original hand-rolled anonymous StatusType for 400.
            return Response.status(Response.Status.BAD_REQUEST).build();
        }
        if (!realmApi.verifyLogin(signedData, signature)) {
            return notAuthorized();
        }
        UserInfo userInfo;
        try {
            userInfo = parseSignedData(signedData, UserInfo.class);
        }
        catch (IOException e) {
            return Response.serverError().entity(e).build();
        }
        // Valid signature: record the user's info on the HTTP session.
        HttpSession session = req.getSession();
        session.setAttribute("userInfo", userInfo);
        return Response
                .created(
                        UriBuilder
                                .fromPath(contextPath)
                                .path(getClass())
                                .build())
                .build();
    }

    /*
     * DELETEing destroys the user's authenticated session - if one is active.
     * This is how the user logs out.
     */
    @DELETE
    public Response destroySession(@Context HttpServletRequest req) {
        // Pass false so we never create a session just to destroy it.
        HttpSession session = req.getSession(false);
        if (session != null) {
            session.invalidate();
        }
        return Response.noContent().build();
    }

    /**
     * Decodes the base64url-encoded signed payload and binds the contained
     * JSON to the given class.
     *
     * @throws IOException if the payload is not valid JSON for {@code klass}
     */
    private <T> T parseSignedData(String signedData, Class<T> klass) throws IOException {
        String json = new String(ProtocolHelpers.base64UrlDecode(signedData));
        return MAPPER.readValue(json, klass);
    }

    /** Builds the 401 response for assertions that fail signature verification. */
    private Response notAuthorized() {
        return Response
                .status(Response.Status.UNAUTHORIZED)
                .build();
    }
}
| apache-2.0 |
Esri/geoportal-server | geoportal/src/com/esri/gpt/framework/mail/MailRequest.java | 6432 | /* See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Esri Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.framework.mail;
import com.esri.gpt.framework.collection.StringSet;
import com.esri.gpt.framework.util.Val;
import static com.esri.gpt.framework.util.Val.stripControls;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import static org.apache.commons.lang3.StringEscapeUtils.escapeHtml4;
/**
 * A simple client for sending SMTP based E-Mail messages.
 * <p>
 * Configure host/port, addresses, subject, body and MIME type, then call
 * {@link #send()}. Instances are not thread-safe.
 */
public class MailRequest {

// class variables =============================================================

/** Mime type - HTML, "text/html; charset=UTF-8" */
public static final String MIMETYPE_HTML = "text/html; charset=UTF-8";

/** Mime type - plain, "text/plain; charset=UTF-8" */
public static final String MIMETYPE_PLAIN = "text/plain; charset=UTF-8";

// instance variables ==========================================================
private Authenticator _authenticator;
private String _body = "";
private String _fromAddress = "";
private String _host = "";
private String _mimeType = MIMETYPE_PLAIN;
private int _port = -1;
private StringSet _recipients;
private String _subject = "";
private String _toAddress = "";
private Map<String, String> _props = new HashMap<String, String>();

//constructors ================================================================

/** Default constructor. */
public MailRequest() {
  setRecipients(new StringSet());
}

/**
 * Constructs with an authenticator if credentials are required by the mail server.
 * @param authenticator the authenticator
 */
public MailRequest(Authenticator authenticator) {
  _authenticator = authenticator;
  setRecipients(new StringSet());
}

// properties ==================================================================

/**
 * Gets the body.
 * @return the body
 */
public String getBody() {
  return _body;
}
/**
 * Sets the body.
 * @param body the body
 */
public void setBody(String body) {
  _body = Val.chkStr(body);
}

/**
 * Gets the from E-Mail address.
 * @return the from address
 */
public String getFromAddress() {
  return _fromAddress;
}
/**
 * Sets the from E-Mail address.
 * @param address the from address
 */
public void setFromAddress(String address) {
  _fromAddress = Val.chkStr(address);
}

/**
 * Gets the host.
 * @return the host
 */
public String getHost() {
  return _host;
}
/**
 * Sets the host.
 * @param host the host
 */
public void setHost(String host) {
  _host = Val.chkStr(host);
}

/**
 * Gets the Mime type.
 * @return the Mime type
 */
public String getMimeType() {
  return _mimeType;
}
/**
 * Sets the Mime type to text/html.
 */
public void setMimeTypeHtml() {
  _mimeType = MIMETYPE_HTML;
}
/**
 * Sets the Mime type to text/plain.
 */
public void setMimeTypePlain() {
  _mimeType = MIMETYPE_PLAIN;
}

/**
 * Gets the port.
 * @return the port (values &lt;= 0 mean "use the provider default")
 */
public int getPort() {
  return _port;
}
/**
 * Sets the port.
 * @param port the port
 */
public void setPort(int port) {
  _port = port;
}

/**
 * Gets the recipients.
 * @return the recipients
 */
public StringSet getRecipients() {
  return _recipients;
}
/**
 * Sets the recipients.
 * @param recipients the recipients
 */
private void setRecipients(StringSet recipients) {
  _recipients = recipients;
}

/**
 * Gets the subject.
 * @return the subject
 */
public String getSubject() {
  return _subject;
}
/**
 * Sets the subject.
 * @param subject the subject
 */
public void setSubject(String subject) {
  _subject = Val.chkStr(subject);
}

/**
 * Gets the to E-Mail address.
 * @return the to address
 */
public String getToAddress() {
  return _toAddress;
}
/**
 * Sets the to E-Mail address.
 * <br/>The address is used to immediately set the recipients. The address
 * string is tokenized with delimiters:
 * <br/>&nbsp;&nbsp;&nbsp;&nbsp;semi-colon comma space
 * @param address the to address
 */
public void setToAddress(String address) {
  _toAddress = Val.chkStr(address);
  getRecipients().clear();
  getRecipients().addDelimited(getToAddress());
}

/**
 * Appends custom properties.
 * @param props custom properties
 */
public void appendProperties(Map<String, String> props) {
  this._props.putAll(props);
}

// methods =====================================================================

/**
 * Makes an Internet E-Mail address.
 * @param address the E-Mail address string
 * @return the Internet address
 * @throws AddressException if the E-Mail address is invalid
 */
private InternetAddress makeAddress(String address)
  throws AddressException {
  return new InternetAddress(address);
}

/**
 * Sends the E-Mail message.
 * @throws AddressException if an E-Mail address is invalid
 * @throws MessagingException if an exception occurs
 */
public void send() throws AddressException, MessagingException {

  // setup the mail server properties
  Properties props = new Properties();
  props.put("mail.smtp.host",getHost());
  if (getPort() > 0) {
    props.put("mail.smtp.port",""+getPort());
  }
  props.putAll(_props);

  // set up the message.
  // Session.getInstance (not getDefaultInstance) is used deliberately:
  // getDefaultInstance caches the very first Session created in the JVM,
  // so later changes to host, port or credentials would be silently ignored.
  Session session = Session.getInstance(props,_authenticator);
  Message message = new MimeMessage(session);
  message.setSubject(getSubject());
  message.setContent(getBody(),getMimeType());
  // the from address is sanitized (control chars stripped, HTML escaped) before parsing
  message.setFrom(makeAddress(escapeHtml4(stripControls(getFromAddress()))));
  for (String sTo: getRecipients()) {
    message.addRecipient(Message.RecipientType.TO,makeAddress(sTo));
  }

  // send the message
  Transport.send(message);
}

}
| apache-2.0 |
dahlstrom-g/intellij-community | platform/platform-impl/src/com/intellij/openapi/updateSettings/impl/WhatsNewFileIconProvider.java | 889 | // Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.updateSettings.impl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.FileIconProvider;
import com.intellij.openapi.fileEditor.impl.HTMLEditorProvider;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
public final class WhatsNewFileIconProvider implements FileIconProvider {
  /**
   * Supplies the generic "information" icon for files opened in the embedded HTML
   * editor (e.g. the "What's New" page); defers to other providers for everything else.
   */
  @Override
  public @Nullable Icon getIcon(@NotNull VirtualFile virtualFile, @Iconable.IconFlags int flags, @Nullable Project project) {
    if (HTMLEditorProvider.isHTMLEditor(virtualFile)) {
      return AllIcons.General.Information;
    }
    return null;
  }
}
| apache-2.0 |
webmate-io/webmate-sdk-java | src/main/java/com/testfabrik/webmate/javasdk/mailtest/TestMail.java | 2269 | package com.testfabrik.webmate.javasdk.mailtest;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.testfabrik.webmate.javasdk.JacksonMapper;
import com.testfabrik.webmate.javasdk.WebmateApiClientException;
import com.testfabrik.webmate.javasdk.testmgmt.Artifact;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
/**
 * Value object representing an e-mail captured during a mail test:
 * sender, recipient list and the raw message content as a JSON tree.
 */
public class TestMail {

    private String from;
    private List<String> to;
    private JsonNode emailContent;

    // no-arg constructor required by Jackson deserialization
    private TestMail() {}

    public TestMail(String from, List<String> to, JsonNode emailContent) {
        this.from = from;
        this.to = to;
        this.emailContent = emailContent;
    }

    /**
     * Create TestMail from Artifact instance.
     * @param artifact artifact to be interpreted as a TestMail.
     * @return TestMail
     * @throws com.testfabrik.webmate.javasdk.WebmateApiClientException if TestMail could not be instantiated
     */
    public static TestMail fromArtifact(Artifact artifact) {
        String json = artifact.getData().toString();
        try {
            return JacksonMapper.getInstance().readValue(json, TestMail.class);
        } catch (IOException e) {
            throw new WebmateApiClientException("Error parsing TestMail json: " + e.getMessage(), e);
        }
    }

    public String getFrom() {
        return from;
    }

    public List<String> getTo() {
        return to;
    }

    public JsonNode getEmailContent() {
        return emailContent;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TestMail other = (TestMail) o;
        return Objects.equals(from, other.from)
                && Objects.equals(to, other.to)
                && Objects.equals(emailContent, other.emailContent);
    }

    @Override
    public int hashCode() {
        return Objects.hash(from, to, emailContent);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("TestMail{");
        sb.append("from='").append(from).append('\'');
        sb.append(", to=").append(to);
        sb.append(", emailContent=").append(emailContent);
        sb.append('}');
        return sb.toString();
    }
}
| apache-2.0 |
kev9318/java-bootcamp | Topic 4/src/userServiceExercise/UserServiceFactory.java | 208 | package userServiceExercise;
/** Factory for obtaining {@code UserService} implementations. */
public class UserServiceFactory {

    /** Non-instantiable: this class only exposes static factory methods. */
    private UserServiceFactory() {
    }

    /**
     * Creates a new, locally-backed user service.
     *
     * @return a fresh {@link UserServiceImp} instance
     */
    public static UserServiceImp getLocalService() {
        return new UserServiceImp();
    }

    // For this exercise only the local service is configured.
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/ModifyReservedInstancesResultStaxUnmarshaller.java | 2609 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
 * ModifyReservedInstancesResult StAX Unmarshaller.
 * <p>
 * Generated code (see the {@code @Generated} annotation): reads StAX events from the
 * response stream and maps the {@code reservedInstancesModificationId} child element
 * into a {@link ModifyReservedInstancesResult}. Do not hand-edit; regenerate instead.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ModifyReservedInstancesResultStaxUnmarshaller implements Unmarshaller<ModifyReservedInstancesResult, StaxUnmarshallerContext> {

    public ModifyReservedInstancesResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        ModifyReservedInstancesResult modifyReservedInstancesResult = new ModifyReservedInstancesResult();
        int originalDepth = context.getCurrentDepth();
        // Mapped fields are direct children of the result element, i.e. one level deeper.
        int targetDepth = originalDepth + 1;

        // When parsing starts at the document root, the payload sits one extra level down.
        if (context.isStartOfDocument())
            targetDepth += 1;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            // End of document: return whatever has been unmarshalled so far.
            if (xmlEvent.isEndDocument())
                return modifyReservedInstancesResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("reservedInstancesModificationId", targetDepth)) {
                    modifyReservedInstancesResult.setReservedInstancesModificationId(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Climbed back above the element we started on: the result element is complete.
                if (context.getCurrentDepth() < originalDepth) {
                    return modifyReservedInstancesResult;
                }
            }
        }
    }

    private static ModifyReservedInstancesResultStaxUnmarshaller instance;

    // Lazy, unsynchronized singleton: the benign race can at worst create extra
    // stateless, interchangeable instances. Standard pattern in the generated SDK code.
    public static ModifyReservedInstancesResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new ModifyReservedInstancesResultStaxUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
hpehl/hal.next | app/src/main/java/org/jboss/hal/client/deployment/ServerGroupDeploymentColumn.java | 22927 | /*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.deployment;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Provider;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.user.client.rpc.AsyncCallback;
import com.google.web.bindery.event.shared.EventBus;
import elemental2.dom.HTMLElement;
import org.jboss.hal.ballroom.wizard.Wizard;
import org.jboss.hal.client.deployment.DeploymentTasks.AddServerGroupDeployment;
import org.jboss.hal.client.deployment.DeploymentTasks.AddUnmanagedDeployment;
import org.jboss.hal.client.deployment.DeploymentTasks.CheckDeployment;
import org.jboss.hal.client.deployment.DeploymentTasks.LoadContent;
import org.jboss.hal.client.deployment.DeploymentTasks.LoadDeploymentsFromRunningServer;
import org.jboss.hal.client.deployment.DeploymentTasks.ReadServerGroupDeployments;
import org.jboss.hal.client.deployment.DeploymentTasks.UploadOrReplace;
import org.jboss.hal.client.deployment.dialog.AddUnmanagedDialog;
import org.jboss.hal.client.deployment.dialog.DeployContentDialog2;
import org.jboss.hal.client.deployment.wizard.DeploymentContext;
import org.jboss.hal.client.deployment.wizard.NamesStep;
import org.jboss.hal.client.deployment.wizard.UploadDeploymentStep;
import org.jboss.hal.client.deployment.wizard.DeploymentState;
import org.jboss.hal.config.Environment;
import org.jboss.hal.core.CrudOperations;
import org.jboss.hal.core.SuccessfulOutcome;
import org.jboss.hal.core.deployment.Content;
import org.jboss.hal.core.deployment.Deployment.Status;
import org.jboss.hal.core.deployment.ServerGroupDeployment;
import org.jboss.hal.core.finder.ColumnAction;
import org.jboss.hal.core.finder.ColumnActionFactory;
import org.jboss.hal.core.finder.Finder;
import org.jboss.hal.core.finder.FinderColumn;
import org.jboss.hal.core.finder.ItemAction;
import org.jboss.hal.core.finder.ItemActionFactory;
import org.jboss.hal.core.finder.ItemDisplay;
import org.jboss.hal.core.finder.ItemMonitor;
import org.jboss.hal.core.finder.ItemsProvider;
import org.jboss.hal.core.mvp.Places;
import org.jboss.hal.core.runtime.server.ServerActions;
import org.jboss.hal.dmr.Composite;
import org.jboss.hal.dmr.CompositeResult;
import org.jboss.hal.dmr.Operation;
import org.jboss.hal.dmr.ResourceAddress;
import org.jboss.hal.dmr.dispatch.Dispatcher;
import org.jboss.hal.flow.FlowContext;
import org.jboss.hal.flow.Outcome;
import org.jboss.hal.flow.Progress;
import org.jboss.hal.flow.Task;
import org.jboss.hal.js.JsHelper;
import org.jboss.hal.meta.AddressTemplate;
import org.jboss.hal.meta.Metadata;
import org.jboss.hal.meta.MetadataRegistry;
import org.jboss.hal.meta.StatementContext;
import org.jboss.hal.meta.security.Constraint;
import org.jboss.hal.meta.token.NameTokens;
import org.jboss.hal.resources.Icons;
import org.jboss.hal.resources.Ids;
import org.jboss.hal.resources.Names;
import org.jboss.hal.resources.Resources;
import org.jboss.hal.spi.AsyncColumn;
import org.jboss.hal.spi.Footer;
import org.jboss.hal.spi.Message;
import org.jboss.hal.spi.MessageEvent;
import org.jboss.hal.spi.Requires;
import static java.util.stream.Collectors.toList;
import static org.jboss.hal.client.deployment.ContentColumn.CONTENT_ADDRESS;
import static org.jboss.hal.client.deployment.ContentColumn.CONTENT_TEMPLATE;
import static org.jboss.hal.client.deployment.ServerGroupDeploymentColumn.SERVER_GROUP_DEPLOYMENT_ADDRESS;
import static org.jboss.hal.client.deployment.wizard.DeploymentState.NAMES;
import static org.jboss.hal.client.deployment.wizard.DeploymentState.UPLOAD;
import static org.jboss.hal.core.deployment.Deployment.Status.OK;
import static org.jboss.hal.core.finder.FinderColumn.RefreshMode.CLEAR_SELECTION;
import static org.jboss.hal.core.finder.FinderColumn.RefreshMode.RESTORE_SELECTION;
import static org.jboss.hal.core.runtime.TopologyTasks.runningServers;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
import static org.jboss.hal.dmr.ModelNodeHelper.properties;
import static org.jboss.hal.flow.Flow.series;
import static org.jboss.hal.resources.CSS.pfIcon;
/**
 * The deployments of a server group.
 * <p>
 * Finder column that lists the {@code deployment} resources below the server group
 * currently selected in the finder. Offers actions to upload new content, deploy
 * content already present in the content repository, add unmanaged deployments, and
 * enable / disable / undeploy existing deployments. Item state (icon, tooltip) is
 * derived from the runtime deployment status when a running reference server is
 * available, otherwise from the enabled flag.
 */
@AsyncColumn(Ids.SERVER_GROUP_DEPLOYMENT)
@Requires(value = {CONTENT_ADDRESS, SERVER_GROUP_DEPLOYMENT_ADDRESS}, recursive = false)
public class ServerGroupDeploymentColumn extends FinderColumn<ServerGroupDeployment> {

    // Address of a deployment below the server group selected in the finder.
    static final String SERVER_GROUP_DEPLOYMENT_ADDRESS = "/{selected.group}/deployment=*";
    static final AddressTemplate SERVER_GROUP_DEPLOYMENT_TEMPLATE = AddressTemplate
            .of(SERVER_GROUP_DEPLOYMENT_ADDRESS);

    private final Environment environment;
    private final EventBus eventBus;
    private final Dispatcher dispatcher;
    private final StatementContext statementContext;
    private final MetadataRegistry metadataRegistry;
    private final Provider<Progress> progress;
    private final Resources resources;

    @Inject
    public ServerGroupDeploymentColumn(Finder finder,
            ColumnActionFactory columnActionFactory,
            ItemActionFactory itemActionFactory,
            Environment environment,
            EventBus eventBus,
            Dispatcher dispatcher,
            Places places,
            CrudOperations crud,
            ServerActions serverActions,
            StatementContext statementContext,
            MetadataRegistry metadataRegistry,
            @Footer Provider<Progress> progress,
            Resources resources) {
        super(new FinderColumn.Builder<ServerGroupDeployment>(finder, Ids.SERVER_GROUP_DEPLOYMENT, Names.DEPLOYMENT)
                .useFirstActionAsBreadcrumbHandler()
                .pinnable()
                .showCount()
                .withFilter());
        this.environment = environment;
        this.eventBus = eventBus;
        this.dispatcher = dispatcher;
        this.statementContext = statementContext;
        this.metadataRegistry = metadataRegistry;
        this.progress = progress;
        this.resources = resources;

        // Column-level "add" dropdown: upload, deploy existing content, add unmanaged.
        // Each action is guarded by the management-model constraint required to execute it.
        List<ColumnAction<ServerGroupDeployment>> addActions = new ArrayList<>();
        addActions.add(new ColumnAction.Builder<ServerGroupDeployment>(Ids.SERVER_GROUP_DEPLOYMENT_UPLOAD)
                .title(resources.constants().uploadNewDeployment())
                .handler(column -> uploadAndDeploy())
                .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, ADD))
                .build());
        addActions.add(new ColumnAction.Builder<ServerGroupDeployment>(Ids.SERVER_GROUP_DEPLOYMENT_ADD)
                .title(resources.constants().deployExistingContent())
                .handler(column -> addDeploymentFromContentRepository())
                .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, ADD))
                .build());
        addActions.add(new ColumnAction.Builder<ServerGroupDeployment>(Ids.SERVER_GROUP_DEPLOYMENT_UNMANAGED_ADD)
                .title(resources.messages().addResourceTitle(Names.UNMANAGED_DEPLOYMENT))
                .handler(column -> addUnmanaged())
                .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, ADD))
                .build());
        addColumnActions(Ids.SERVER_GROUP_DEPLOYMENT_ADD_ACTIONS, pfIcon("add-circle-o"), resources.constants().add(),
                addActions);
        addColumnAction(columnActionFactory.refresh(Ids.SERVER_GROUP_DEPLOYMENT_REFRESH));

        // Loads the deployments of the selected server group, then enriches them with
        // runtime information read from the group's running servers (async task chain).
        ItemsProvider<ServerGroupDeployment> itemsProvider = (context, callback) -> {
            List<Task<FlowContext>> tasks = new ArrayList<>();
            tasks.add(new ReadServerGroupDeployments(environment, dispatcher, statementContext.selectedServerGroup()));
            tasks.addAll(runningServers(environment, dispatcher,
                    properties(SERVER_GROUP, statementContext.selectedServerGroup())));
            tasks.add(new LoadDeploymentsFromRunningServer(environment, dispatcher));
            series(new FlowContext(progress.get()), tasks)
                    .subscribe(new Outcome<FlowContext>() {
                        @Override
                        public void onError(FlowContext context, Throwable error) {
                            callback.onFailure(error);
                        }

                        @Override
                        public void onSuccess(FlowContext context) {
                            List<ServerGroupDeployment> serverGroupDeployments = context
                                    .get(DeploymentTasks.SERVER_GROUP_DEPLOYMENTS);
                            callback.onSuccess(serverGroupDeployments);
                        }
                    });
        };
        setItemsProvider(itemsProvider);

        // reuse the items provider to filter breadcrumb items
        setBreadcrumbItemsProvider((context, callback) ->
                itemsProvider.get(context, new AsyncCallback<List<ServerGroupDeployment>>() {
                    @Override
                    public void onFailure(Throwable caught) {
                        callback.onFailure(caught);
                    }

                    @Override
                    public void onSuccess(List<ServerGroupDeployment> result) {
                        // only running deployments w/ a reference server will show up in the breadcrumb dropdown
                        List<ServerGroupDeployment> deploymentsOnServer = result.stream()
                                .filter(ServerGroupDeployment::runningWithReferenceServer)
                                .collect(toList());
                        callback.onSuccess(deploymentsOnServer);
                    }
                }));

        // Renders one finder item per deployment. Icon/tooltip prefer the runtime status
        // (OK / STOPPED / FAILED) when available, and fall back to the enabled flag.
        setItemRenderer(item -> new ItemDisplay<ServerGroupDeployment>() {
            @Override
            public String getId() {
                return Ids.serverGroupDeployment(statementContext.selectedServerGroup(), item.getName());
            }

            @Override
            public String getTitle() {
                return item.getName();
            }

            @Override
            public String getTooltip() {
                if (item.getDeployment() != null) {
                    if (item.getDeployment().getStatus() == Status.FAILED) {
                        return resources.constants().failed();
                    } else if (item.getDeployment().getStatus() == Status.STOPPED) {
                        return resources.constants().stopped();
                    } else if (item.getDeployment().getStatus() == OK) {
                        return resources.constants().activeLower();
                    } else {
                        return resources.constants().unknownState();
                    }
                } else {
                    return item.isEnabled() ? resources.constants().enabled() : resources.constants()
                            .disabled();
                }
            }

            @Override
            public HTMLElement getIcon() {
                if (item.getDeployment() != null) {
                    if (item.getDeployment().getStatus() == Status.FAILED) {
                        return Icons.error();
                    } else if (item.getDeployment().getStatus() == Status.STOPPED) {
                        return Icons.stopped();
                    } else if (item.getDeployment().getStatus() == OK) {
                        return Icons.ok();
                    } else {
                        return Icons.unknown();
                    }
                } else {
                    return item.isEnabled() ? Icons.ok() : Icons.disabled();
                }
            }

            @Override
            public String getFilterData() {
                // makes items searchable by name and by enabled/disabled keyword
                return item.getName() + " " + (item.isEnabled() ? ENABLED : DISABLED);
            }

            @Override
            public List<ItemAction<ServerGroupDeployment>> actions() {
                List<ItemAction<ServerGroupDeployment>> actions = new ArrayList<>();
                // view makes sense only for running deployments w/ a reference server
                if (item.runningWithReferenceServer()) {
                    actions.add(itemActionFactory.view(NameTokens.SERVER_GROUP_DEPLOYMENT,
                            Ids.SERVER_GROUP, statementContext.selectedServerGroup(),
                            Ids.DEPLOYMENT, item.getName()));
                }
                if (item.isEnabled()) {
                    actions.add(new ItemAction.Builder<ServerGroupDeployment>()
                            .title(resources.constants().disable())
                            .handler(itm -> disable(itm))
                            .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, UNDEPLOY))
                            .build());
                } else {
                    actions.add(new ItemAction.Builder<ServerGroupDeployment>()
                            .title(resources.constants().enable())
                            .handler(itm -> enable(itm))
                            .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, DEPLOY))
                            .build());
                }
                // NOTE(review): the lambda parameter 'item' shadows the enclosing renderer's
                // 'item'; both refer to the same deployment here, but renaming would be safer.
                actions.add(new ItemAction.Builder<ServerGroupDeployment>()
                        .title(resources.constants().undeploy())
                        .handler(item -> crud.remove(Names.DEPLOYMENT, item.getName(), SERVER_GROUP_DEPLOYMENT_TEMPLATE,
                                () -> refresh(CLEAR_SELECTION)))
                        .constraint(Constraint.executable(SERVER_GROUP_DEPLOYMENT_TEMPLATE, REMOVE))
                        .build());
                return actions;
            }
        });

        setPreviewCallback(item -> new ServerGroupDeploymentPreview(this, item, places, resources, serverActions,
                environment));
    }

    /** Enables drag-and-drop deployment onto the column where the browser supports it. */
    @Override
    public void attach() {
        super.attach();
        if (JsHelper.supportsAdvancedUpload()) {
            setOnDrop(event -> DeploymentTasks.uploadAndDeploy(this, environment, dispatcher, eventBus, progress,
                    event.dataTransfer.files, statementContext.selectedServerGroup(), resources));
        }
    }

    /**
     * Opens a two-step wizard (choose file, then name / runtime name) which uploads the
     * content and deploys it to the selected server group. The wizard stays open to show
     * progress, success or error of the async upload flow.
     */
    private void uploadAndDeploy() {
        Metadata metadata = metadataRegistry.lookup(SERVER_GROUP_DEPLOYMENT_TEMPLATE);
        Wizard<DeploymentContext, DeploymentState> wizard = new Wizard.Builder<DeploymentContext, DeploymentState>(
                resources.messages().addResourceTitle(resources.constants().content()), new DeploymentContext())

                .addStep(UPLOAD, new UploadDeploymentStep(resources))
                .addStep(NAMES, new NamesStep(environment, metadata, resources))

                .onBack((context, currentState) -> currentState == NAMES ? UPLOAD : null)
                .onNext((context, currentState) -> currentState == UPLOAD ? NAMES : null)

                .stayOpenAfterFinish()
                .onFinish((wzd, context) -> {
                    String name = context.name;
                    String runtimeName = context.runtimeName;
                    wzd.showProgress(resources.constants().deploymentInProgress(),
                            resources.messages().deploymentInProgress(name));

                    series(new FlowContext(progress.get()),
                            new CheckDeployment(dispatcher, name),
                            new UploadOrReplace(environment, dispatcher, name, runtimeName, context.file, false),
                            new AddServerGroupDeployment(environment, dispatcher, name, runtimeName,
                                    statementContext.selectedServerGroup()))
                            .subscribe(new Outcome<FlowContext>() {
                                @Override
                                public void onError(FlowContext context, Throwable error) {
                                    wzd.showError(resources.constants().deploymentError(),
                                            resources.messages().deploymentError(name), error.getMessage());
                                }

                                @Override
                                public void onSuccess(FlowContext context) {
                                    refresh(Ids.serverGroupDeployment(statementContext.selectedServerGroup(), name));
                                    wzd.showSuccess(resources.constants().deploymentSuccessful(),
                                            resources.messages().deploymentSuccessful(name),
                                            resources.messages().view(Names.DEPLOYMENT),
                                            cxt -> { /* nothing to do, content is already selected */ });
                                }
                            });
                })
                .build();
        wizard.show();
    }

    /**
     * Loads the content repository and opens a dialog to deploy content that is not yet
     * deployed to the selected server group. Deploying executes one composite ADD
     * operation per selected content item; a warning is shown when everything is
     * already deployed.
     */
    private void addDeploymentFromContentRepository() {
        Outcome<FlowContext> outcome = new Outcome<FlowContext>() {
            @Override
            public void onError(FlowContext context, Throwable error) {
                MessageEvent.fire(eventBus, Message.error(resources.messages().loadContentError(), error.getMessage()));
            }

            @Override
            public void onSuccess(FlowContext context) {
                // extract content which is not deployed on statementContext.selectedServerGroup()
                String serverGroup = statementContext.selectedServerGroup();
                List<Content> content = context.pop();
                List<Content> undeployedContentOnSelectedServerGroup = content.stream()
                        .filter(c -> !c.isDeployedTo(serverGroup))
                        .collect(toList());
                if (undeployedContentOnSelectedServerGroup.isEmpty()) {
                    MessageEvent.fire(eventBus,
                            Message.warning(resources.messages().allContentAlreadyDeployedToServerGroup(serverGroup)));
                } else {
                    new DeployContentDialog2(serverGroup, undeployedContentOnSelectedServerGroup, resources,
                            (sg, cnt, enable) -> {
                                // one ADD operation per content item, executed as a single composite
                                List<Operation> operations = cnt.stream()
                                        .map(c -> {
                                            ResourceAddress resourceAddress = new ResourceAddress()
                                                    .add(SERVER_GROUP, serverGroup)
                                                    .add(DEPLOYMENT, c.getName());
                                            return new Operation.Builder(resourceAddress, ADD)
                                                    .param(RUNTIME_NAME, c.getRuntimeName())
                                                    .param(ENABLED, enable)
                                                    .build();
                                        })
                                        .collect(toList());
                                // enabling a deployment can take a while; show footer progress for it
                                if (enable) {
                                    progress.get().reset();
                                    progress.get().tick();
                                }
                                dispatcher.execute(new Composite(operations), (CompositeResult cr) -> {
                                    if (enable) {
                                        progress.get().finish();
                                    }
                                    refresh(Ids.serverGroupDeployment(serverGroup, cnt.get(0).getName()));
                                    MessageEvent.fire(eventBus,
                                            Message.success(resources.messages().contentDeployed2(serverGroup)));
                                });
                            }).show();
                }
            }
        };
        series(new FlowContext(progress.get()), new LoadContent(dispatcher)).subscribe(outcome);
    }

    /**
     * Opens the dialog to register an unmanaged deployment (content that lives outside
     * the content repository) and deploys it to the selected server group. The name
     * field is validated for uniqueness within the column.
     */
    private void addUnmanaged() {
        Metadata metadata = metadataRegistry.lookup(CONTENT_TEMPLATE);
        AddUnmanagedDialog dialog = new AddUnmanagedDialog(metadata, resources,
                (name, model) -> {
                    if (model != null) {
                        String serverGroup = statementContext.selectedServerGroup();
                        String runtimeName = model.get(RUNTIME_NAME).asString();
                        series(new FlowContext(progress.get()),
                                new AddUnmanagedDeployment(dispatcher, name, model),
                                new AddServerGroupDeployment(environment, dispatcher, name, runtimeName, serverGroup))
                                .subscribe(new SuccessfulOutcome<FlowContext>(eventBus, resources) {
                                    @Override
                                    public void onSuccess(FlowContext context) {
                                        refresh(Ids.serverGroupDeployment(serverGroup, name));
                                        MessageEvent.fire(eventBus, Message.success(resources.messages()
                                                .addResourceSuccess(Names.UNMANAGED_DEPLOYMENT, name)));
                                    }
                                });
                    }
                });
        dialog.getForm().<String>getFormItem(NAME).addValidationHandler(createUniqueValidation());
        dialog.show();
    }

    /** Enables (deploys) the deployment on its server group. */
    void enable(ServerGroupDeployment sgd) {
        enableDisable(sgd, DEPLOY, resources.messages().deploymentEnabledSuccess(sgd.getName()));
    }

    /** Disables (undeploys) the deployment on its server group; the content itself remains. */
    void disable(ServerGroupDeployment sgd) {
        enableDisable(sgd, UNDEPLOY, resources.messages().deploymentDisabledSuccess(sgd.getName()));
    }

    /**
     * Runs the given deploy/undeploy operation against the deployment's server group,
     * showing an item-level progress spinner while the operation is in flight and a
     * success message afterwards.
     */
    private void enableDisable(ServerGroupDeployment sgd, String operation, SafeHtml message) {
        String id = Ids.serverGroupDeployment(sgd.getServerGroup(), sgd.getName());
        ResourceAddress address = new ResourceAddress()
                .add(SERVER_GROUP, sgd.getServerGroup())
                .add(DEPLOYMENT, sgd.getName());
        Operation op = new Operation.Builder(address, operation).build();
        ItemMonitor.startProgress(id);
        dispatcher.execute(op, result -> {
            ItemMonitor.stopProgress(id);
            refresh(RESTORE_SELECTION);
            MessageEvent.fire(eventBus, Message.success(message));
        });
    }
}
| apache-2.0 |
googleapis/java-os-config | google-cloud-os-config/src/main/java/com/google/cloud/osconfig/v1/package-info.java | 2665 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A client to OS Config API
*
* <p>The interfaces provided are listed below, along with usage samples.
*
* <p>======================= OsConfigServiceClient =======================
*
* <p>Service Description: OS Config API
*
* <p>The OS Config service is a server-side component that you can use to manage package
* installations and patch jobs for virtual machine instances.
*
* <p>Sample for OsConfigServiceClient:
*
* <pre>{@code
* try (OsConfigServiceClient osConfigServiceClient = OsConfigServiceClient.create()) {
* PatchJobs.ExecutePatchJobRequest request =
* PatchJobs.ExecutePatchJobRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setDescription("description-1724546052")
* .setInstanceFilter(PatchJobs.PatchInstanceFilter.newBuilder().build())
* .setPatchConfig(PatchJobs.PatchConfig.newBuilder().build())
* .setDuration(Duration.newBuilder().build())
* .setDryRun(true)
* .setDisplayName("displayName1714148973")
* .setRollout(PatchJobs.PatchRollout.newBuilder().build())
* .build();
* PatchJobs.PatchJob response = osConfigServiceClient.executePatchJob(request);
* }
* }</pre>
*
* <p>======================= OsConfigZonalServiceClient =======================
*
* <p>Service Description: Zonal OS Config API
*
* <p>The OS Config service is the server-side component that allows users to manage package
* installations and patch jobs for Compute Engine VM instances.
*
* <p>Sample for OsConfigZonalServiceClient:
*
* <pre>{@code
* try (OsConfigZonalServiceClient osConfigZonalServiceClient =
* OsConfigZonalServiceClient.create()) {
* OSPolicyAssignmentName name =
* OSPolicyAssignmentName.of("[PROJECT]", "[LOCATION]", "[OS_POLICY_ASSIGNMENT]");
* OSPolicyAssignment response = osConfigZonalServiceClient.getOSPolicyAssignment(name);
* }
* }</pre>
*/
@Generated("by gapic-generator-java")
package com.google.cloud.osconfig.v1;
import javax.annotation.Generated;
| apache-2.0 |
CheRut/chedmitry | chapter_006/src/test/java/ru/chedmitriy/monitor/ex1105/LinkedListThreadTest.java | 2604 | package ru.chedmitriy.monitor.ex1105;
import org.junit.Before;
import org.junit.Test;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.*;
/**
* ru.chedmitriy.multithreading.threads.monitor.ex1105.
*
* @author cheDmitry
* @version 1.0
* @since 29.10.2017
* тестируем класс LinkedListThread
*
*/
/**
 * Tests for {@code LinkedListThread}.
 *
 * <p>Two worker threads ({@code SomeThread} and {@code AnotherThread}) populate the
 * shared list concurrently in {@link #init()}. Both threads are joined before any
 * test method runs, so every test observes the fully populated list (8 elements).
 * Because the two writers race for the head and tail, several assertions accept
 * either thread's value.</p>
 *
 * @author cheDmitry
 * @version 1.0
 * @since 29.10.2017
 */
public class LinkedListThreadTest {
    // Shared list under test, written concurrently by the two worker threads below.
    private LinkedListThread listThread;
    private SomeThread someThread;
    private AnotherThread anotherThread;

    @Before
    public void init() throws InterruptedException {
        listThread = new LinkedListThread();
        someThread = new SomeThread(listThread);
        someThread.setName("1");
        anotherThread = new AnotherThread(listThread);
        someThread.start();
        anotherThread.start();
        // Wait for both writers to finish so the assertions see the final list state.
        someThread.join();
        anotherThread.join();
    }

    /**
     * Tests {@code LinkedListThread#getNext()} and, implicitly,
     * {@code LinkedListThread#addFirst()}: the element at position 1 must be one of
     * the two values pushed to the front ("twenty" or "thirty") — either worker
     * thread may have won the race for the head, hence both values are accepted.
     *
     * @throws Exception if the list access fails
     */
    @Test
    public void getNext() throws Exception {
        String getFirstValue = (String) listThread.getNext(1);
        assertTrue(getFirstValue.equals("twenty")
            || getFirstValue.equals("thirty"));
    }

    /**
     * Tests {@code LinkedListThread#remove()} and, implicitly,
     * {@code LinkedListThread#addLast()}: the removed element must be the last value
     * appended by one of the two worker threads ("six" or "three") — whichever
     * thread appended last in the interleaving.
     *
     * @throws Exception if the removal fails
     */
    @Test
    public void remove() throws Exception {
        String removingValue = listThread.remove();
        assertTrue(removingValue.equals("six")
            || removingValue.equals("three"));
    }

    // Both threads add 4 elements each, so the final size is always 8
    // regardless of interleaving.
    @Test
    public void getSize() throws Exception {
        assertThat(listThread.getSize(), is(8));
    }
}
/**
 * Worker thread that appends "one", "two", "three" to the tail of the shared
 * list and then pushes "thirty" onto its head.
 */
class SomeThread extends Thread {
    // The shared list this worker writes into.
    private final LinkedListThread target;

    SomeThread(LinkedListThread target) {
        this.target = target;
    }

    @Override
    public void run() {
        target.addLast("one");
        target.addLast("two");
        target.addLast("three");
        target.addFirst("thirty");
    }
}
/**
 * Worker thread that appends "four", "five", "six" to the tail of the shared
 * list and then pushes "twenty" onto its head.
 */
class AnotherThread extends Thread {
    // The shared list this worker writes into.
    private final LinkedListThread target;

    AnotherThread(LinkedListThread target) {
        this.target = target;
    }

    @Override
    public void run() {
        target.addLast("four");
        target.addLast("five");
        target.addLast("six");
        target.addFirst("twenty");
    }
}
| apache-2.0 |
youngmonkeys/ezyfox | ezyfox-tools/src/main/java/com/tvd12/ezyfox/tool/EzyObjectInstanceRandom.java | 15956 | package com.tvd12.ezyfox.tool;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.function.Supplier;
import com.tvd12.ezyfox.function.EzyParamsFunction;
import com.tvd12.ezyfox.function.EzyVoidParamsFunction;
import com.tvd12.ezyfox.io.EzyStrings;
import com.tvd12.ezyfox.reflect.EzyClass;
import com.tvd12.ezyfox.reflect.EzyField;
import com.tvd12.ezyfox.reflect.EzyFields;
import com.tvd12.ezyfox.reflect.EzyGenerics;
/**
 * Creates randomly populated object instances, field-name-to-random-value maps,
 * and Java source snippets that construct such objects (used for generated test
 * code).
 *
 * <p>Known simple types (primitives, wrappers, String, Number, BigDecimal,
 * BigInteger, dates) are produced by the {@link #valueRandoms} supplier table;
 * arrays, enums, collections and maps are handled structurally; any other
 * ("customer") class is filled recursively via reflection.</p>
 */
@SuppressWarnings({"rawtypes", "unchecked"})
public class EzyObjectInstanceRandom {

    /** Maps a value type to the supplier producing a random value of that type. */
    protected final Map<Class<?>, Supplier<Object>> valueRandoms;

    /** Alphabet used for random char values. */
    protected static final char[] DEFAULT_CHARACTERS = new char[] {
        'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
        '1', '2', '3', '4', '5', '6', '7', '8', '9'
    };

    /** Alphabet used to build random strings. */
    protected static final String[] DEFAULT_STRINGS = new String[] {
        "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
        "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
        "1", "2", "3", "4", "5", "6", "7", "8", "9"
    };

    public EzyObjectInstanceRandom() {
        this.valueRandoms = defaultValueRandoms();
    }

    /**
     * Builds a map of field name to random value mirroring the given class's fields.
     *
     * @param clazz the class whose fields are randomized
     * @param includeAllFields true to include inherited fields, false for declared fields only
     * @return a map of field name to random value
     */
    public Map<String, Object> randomObjectToMap(Class<?> clazz, boolean includeAllFields) {
        Object map = randomObject(
            clazz,
            includeAllFields,
            params -> new HashMap<>(),
            params -> randomValue((Class<?>) params[0]),
            params -> {
                EzyField field = (EzyField) params[1];
                ((Map) params[0]).put(field.getName(), params[2]);
            });
        return (Map) map;
    }

    /**
     * Produces a random value of the given type; nested customer classes are
     * rendered as nested maps (declared fields only).
     */
    public Object randomValue(Class<?> valueType) {
        Object randomValue = randomValue(valueType, params ->
            randomObjectToMap((Class<?>) params[0], false)
        );
        return randomValue;
    }

    /**
     * Creates an instance of the given class with all matching fields set to
     * random values.
     *
     * @param clazz the class to instantiate
     * @param includeAllFields true to include inherited fields, false for declared fields only
     * @return the randomly populated instance
     */
    public <T> T randomObject(Class<T> clazz, boolean includeAllFields) {
        Object instance = randomObject(
            clazz,
            includeAllFields,
            params -> ((EzyClass) params[0]).newInstance(),
            params -> randomObjectValue((Class<?>) params[0]),
            params -> {
                EzyField field = (EzyField) params[1];
                field.getField().setAccessible(true);
                field.set(params[0], params[2]);
            });
        return (T) instance;
    }

    /**
     * Creates a list of {@code size} randomly populated instances of the item class.
     */
    public <T> List<T> randomObjectList(Class<T> itemClass, int size, boolean includeAllFields) {
        List<T> list = new ArrayList<>();
        for (int i = 0; i < size; ++i)
            list.add(randomObject(itemClass, includeAllFields));
        return list;
    }

    /**
     * Produces a random value of the given type; nested customer classes are
     * instantiated recursively (declared fields only).
     */
    public Object randomObjectValue(Class<?> valueType) {
        Object randomValue = randomValue(valueType, params ->
            randomObject((Class<?>) params[0], false)
        );
        return randomValue;
    }

    /**
     * Core reflection-driven population loop: iterates the class's (declared or
     * all) non-static fields, generates a random value per field, and hands it to
     * the given setter callback.
     *
     * @param clazz the class to populate
     * @param includeAllFields true for all fields, false for declared fields only
     * @param newInstanceFunc callback creating the target container (instance or map)
     * @param randomValueFunc callback producing a random value for a field type
     * @param setValueFunc callback storing the value into the container
     * @return the populated container
     */
    public Object randomObject(
        Class<?> clazz,
        boolean includeAllFields,
        EzyParamsFunction<Object> newInstanceFunc,
        EzyParamsFunction<Object> randomValueFunc, EzyVoidParamsFunction setValueFunc) {
        EzyClass classProxy = new EzyClass(clazz);
        List<EzyField> fields = includeAllFields
            ? classProxy.getFields()
            : classProxy.getDeclaredFields();
        Object instance = newInstanceFunc.apply(classProxy);
        for (EzyField field : fields) {
            Field javaField = field.getField();
            if (Modifier.isStatic(javaField.getModifiers()))
                continue;
            Class<?> fieldType = javaField.getType();
            Object randomValue = null;
            // Container fields need the generic type to randomize their elements.
            if (fieldType.isAssignableFrom(Map.class))
                randomValue = randomMapValue(field.getGenericType());
            else if (fieldType.isAssignableFrom(List.class))
                randomValue = randomCollectionValue(field.getGenericType());
            else if (fieldType.isAssignableFrom(Set.class))
                randomValue = randomCollectionValue(field.getGenericType());
            else if (fieldType.isAssignableFrom(Collection.class))
                randomValue = randomCollectionValue(field.getGenericType());
            else
                randomValue = randomValueFunc.apply(fieldType);
            setValueFunc.apply(instance, field, randomValue);
        }
        return instance;
    }

    /**
     * Dispatches random value creation: supplier table first, then arrays and
     * enums, falling back to the given callback for customer classes.
     */
    public Object randomValue(Class<?> valueType, EzyParamsFunction<Object> randomObjectFunc) {
        Object randomValue = null;
        Supplier<Object> random = valueRandoms.get(valueType);
        if (random != null)
            randomValue = random.get();
        else if (valueType.isArray())
            randomValue = randomArray(valueType.getComponentType());
        else if (valueType.isEnum())
            randomValue = randomEnumValue(valueType);
        else
            randomValue = randomObjectFunc.apply(valueType);
        return randomValue;
    }

    /**
     * Builds a random map from the generic map type; falls back to an empty map
     * if the key/value type arguments cannot be resolved.
     */
    public Object randomMapValue(Type mapType) {
        try {
            Object map = randomMapValue0(mapType);
            return map;
        } catch (Exception e) {
            return new HashMap<>();
        }
    }

    /** Creates a map with 1..5 random entries of the resolved key/value types. */
    protected Object randomMapValue0(Type mapType) {
        Class[] keyValueTypes = EzyGenerics.getTwoGenericClassArguments(mapType);
        Random random = new Random();
        int maxEntries = 1 + random.nextInt(5);
        Map<Object, Object> map = new HashMap<>();
        for (int i = 0; i < maxEntries; ++i) {
            Object key = randomValue(keyValueTypes[0]);
            Object value = randomValue(keyValueTypes[1]);
            map.put(key, value);
        }
        return map;
    }

    /**
     * Builds a random collection from the generic collection type; falls back to
     * an empty list if the item type argument cannot be resolved.
     */
    public Object randomCollectionValue(Type collectionType) {
        try {
            Object collection = randomCollectionValue0(collectionType);
            return collection;
        } catch (Exception e) {
            return new ArrayList<>();
        }
    }

    /**
     * Creates a Set or List (matching the declared raw type) with 1..5 random
     * items of the resolved item type.
     */
    protected Object randomCollectionValue0(Type collectionType) {
        Class itemType = EzyGenerics.getOneGenericClassArgument(collectionType);
        Random random = new Random();
        int maxItems = 1 + random.nextInt(5);
        Collection<Object> collection = null;
        ParameterizedType parameterizedType = (ParameterizedType) collectionType;
        Class<?> rawType = (Class<?>) parameterizedType.getRawType();
        if (rawType.isAssignableFrom(Set.class))
            collection = new HashSet<>();
        else
            collection = new ArrayList<>();
        for (int i = 0; i < maxItems; ++i) {
            Object value = randomValue(itemType);
            collection.add(value);
        }
        return collection;
    }

    /** Creates an array of 1..5 random items of the given component type. */
    public Object randomArray(Class<?> itemType) {
        Random random = new Random();
        int maxLength = 1 + random.nextInt(5);
        Object array = Array.newInstance(itemType, maxLength);
        for (int i = 0; i < maxLength; ++i) {
            Object itemValue = randomValue(itemType);
            Array.set(array, i, itemValue);
        }
        return array;
    }

    /**
     * Picks a random constant of the given enum class, or the empty string on
     * failure (mirroring the map/collection fallbacks).
     */
    public Object randomEnumValue(Class<?> enumClass) {
        try {
            Object value = randomEnumValue0(enumClass);
            return value;
        } catch (Exception e) {
            return "";
        }
    }

    /**
     * Picks a random enum constant. Uses {@link Class#getEnumConstants()} instead
     * of reflecting on the synthetic {@code $VALUES} field, which is
     * JVM-implementation specific and fails under a security manager.
     */
    protected Object randomEnumValue0(Class<?> enumClass) throws Exception {
        Object[] values = enumClass.getEnumConstants();
        if (values == null || values.length == 0)
            return "";
        Random random = new Random();
        int index = random.nextInt(values.length);
        return values[index];
    }

    /**
     * Renders a Java source snippet producing a random value of the given type,
     * e.g. {@code "(byte)42"}, {@code "\"abc\""}, {@code "new HashMap<>()"}.
     * Customer classes are rendered as calls to a generated {@code newFoo()}
     * factory method (see {@link #randomObjectFuncScript(Class)}).
     */
    public String randomValueScript(Class<?> type) {
        String script = "";
        if (Map.class.isAssignableFrom(type))
            script = "new HashMap<>()";
        else if (List.class.isAssignableFrom(type))
            script = "new ArrayList<>()";
        else if (Set.class.isAssignableFrom(type))
            script = "new HashSet<>()";
        else if (Collection.class.isAssignableFrom(type))
            script = "new ArrayList<>()";
        else if (BigDecimal.class.isAssignableFrom(type))
            script = "new BigDecimal(10)";
        else if (BigInteger.class.isAssignableFrom(type))
            // BigInteger has no int constructor; emit the String form so the
            // generated snippet actually compiles.
            script = "new BigInteger(\"10\")";
        else if (java.util.Date.class.isAssignableFrom(type))
            script = "new Date()";
        else if (java.sql.Date.class.isAssignableFrom(type))
            script = "new java.sql.Date()";
        else if (LocalDate.class.isAssignableFrom(type))
            script = "LocalDate.now()";
        else if (LocalDateTime.class.isAssignableFrom(type))
            script = "LocalDateTime.now()";
        else
            script = randomValue(type).toString();
        // Decorate primitive/wrapper literals so the snippet has the right type.
        if (byte.class.equals(type) || Byte.class.equals(type))
            script = "(byte)" + script;
        else if (char.class.equals(type) || Character.class.equals(type))
            script = "'" + script + "'";
        else if (double.class.equals(type) || Double.class.equals(type))
            script = script + "D";
        else if (float.class.equals(type) || Float.class.equals(type))
            script = script + "F";
        else if (long.class.equals(type) || Long.class.equals(type))
            script = script + "L";
        // NOTE(review): this starts a new if-chain rather than continuing the one
        // above; harmless because a type can only match one branch, but it looks
        // unintended — confirm before merging the chains.
        if (short.class.equals(type) || Short.class.equals(type))
            script = "(short)" + script;
        else if (String.class.equals(type))
            script = EzyStrings.quote(script);
        else if (type.isArray())
            script = randomArrayScript(type.getComponentType());
        else if (isCustomerClass(type))
            script = "new" + type.getSimpleName() + "()";
        return script;
    }

    /** Renders an array-initializer snippet with one random item. */
    public String randomArrayScript(Class<?> itemType) {
        return new StringBuilder("new ")
            .append(itemType.getSimpleName())
            .append("[] {")
            .append(randomValueScript(itemType))
            .append("}")
            .toString();
    }

    /** Renders a {@code newFoo()} factory method for the class, all fields included. */
    public String randomObjectFuncScript(Class<?> clazz) {
        return randomObjectFuncScript(clazz, true);
    }

    /**
     * Renders a protected {@code newFoo()} factory method whose body builds a
     * randomly populated instance of the class.
     */
    public String randomObjectFuncScript(Class<?> clazz, boolean allFields) {
        String bodyScript = randomObjectScript(clazz, null, allFields);
        return new StringBuilder("protected ")
            .append(clazz.getSimpleName()).append(" new")
            .append(clazz.getSimpleName()).append("() {\n")
            .append(EzyStringTool.tabAll(bodyScript, 1))
            .append("\n}")
            .toString();
    }

    /** Renders a construction script, collecting nested customer classes. */
    public String randomObjectScript(Class<?> clazz, Set<Class<?>> customClasses) {
        return randomObjectScript(clazz, customClasses, true);
    }

    /**
     * Renders a construction script for the class: exceptions get a literal
     * instance, interfaces a mock, abstract classes a spy, and concrete classes
     * a setter-by-setter population script.
     *
     * @param clazz the class to render
     * @param customClasses receives nested customer classes encountered, may be null
     * @param allFields true for all fields, false for declared fields only
     */
    public String randomObjectScript(
        Class<?> clazz, Set<Class<?>> customClasses, boolean allFields) {
        if (Throwable.class.isAssignableFrom(clazz))
            return randomExceptionScript();
        if (clazz.isInterface())
            return randomInterfaceObjectScript(clazz);
        if (Modifier.isAbstract(clazz.getModifiers()))
            return randomAbstractObjectScript(clazz);
        return randomClassObjectScript(clazz, customClasses, allFields);
    }

    /** Renders a script creating a plain test Exception. */
    public String randomExceptionScript() {
        return new StringBuilder()
            .append("Exception e = new Exception(\"test\");")
            .append("\nreturn e;")
            .toString();
    }

    /** Renders a script mocking the given interface (Mockito-style). */
    public String randomInterfaceObjectScript(Class<?> intf) {
        return new StringBuilder(intf.getSimpleName())
            .append(" v = mock(")
            .append(intf.getSimpleName()).append(".class);")
            .append("\nreturn v;")
            .toString();
    }

    /** Renders a script spying the given abstract class (Mockito-style). */
    public String randomAbstractObjectScript(Class<?> clazz) {
        return new StringBuilder(clazz.getSimpleName())
            .append(" v = spy(")
            .append(clazz.getSimpleName()).append(".class);")
            .append("\nreturn v;")
            .toString();
    }

    /**
     * Renders a script that instantiates the class and calls a setter per
     * non-static field with a random value; nested customer classes are flagged
     * with a trailing comment and collected into {@code customClasses}.
     */
    public String randomClassObjectScript(
        Class<?> clazz, Set<Class<?>> customClasses, boolean allFields) {
        StringBuilder builder = new StringBuilder()
            .append(clazz.getSimpleName())
            .append(" v = new ")
            .append(clazz.getSimpleName()).append("();\n");
        List<Field> fields = allFields
            ? EzyFields.getFields(clazz) : EzyFields.getDeclaredFields(clazz);
        for (Field field : fields) {
            if (Modifier.isStatic(field.getModifiers()))
                continue;
            EzyField f = new EzyField(field);
            Class<?> fieldType = field.getType();
            String randomValue = randomValueScript(fieldType);
            builder.append("v.")
                .append(f.getSetterMethod())
                .append("(").append(randomValue).append(");");
            if (isCustomerClass(fieldType)) {
                if (customClasses != null)
                    customClasses.add(fieldType);
                builder.append(" // customer class");
            }
            builder.append("\n");
        }
        return builder.append("return v;").toString();
    }

    /** True if the type is a user-defined class (not JDK / well-known). */
    protected boolean isCustomerClass(Class<?> clazz) {
        return EzyToolTypes.isCustomerClass(clazz);
    }

    /**
     * Builds the default supplier table: primitives and wrappers in [0, 125),
     * random chars/strings from the default alphabets, and date types a few
     * days in the future.
     */
    protected Map<Class<?>, Supplier<Object>> defaultValueRandoms() {
        Map<Class<?>, Supplier<Object>> randoms = new HashMap<>();
        randoms.put(boolean.class, () -> {
            Random random = new Random();
            Object value = random.nextBoolean();
            return value;
        });
        randoms.put(byte.class, () -> {
            Random random = new Random();
            Object value = (byte) random.nextInt(125);
            return value;
        });
        randoms.put(char.class, () -> {
            Random random = new Random();
            int index = random.nextInt(DEFAULT_CHARACTERS.length);
            Object value = DEFAULT_CHARACTERS[index];
            return value;
        });
        randoms.put(double.class, () -> {
            Random random = new Random();
            Object value = (double) random.nextInt(125);
            return value;
        });
        randoms.put(float.class, () -> {
            Random random = new Random();
            Object value = (float) random.nextInt(125);
            return value;
        });
        randoms.put(int.class, () -> {
            Random random = new Random();
            Object value = random.nextInt(125);
            return value;
        });
        randoms.put(long.class, () -> {
            Random random = new Random();
            Object value = (long) random.nextInt(125);
            return value;
        });
        randoms.put(short.class, () -> {
            Random random = new Random();
            Object value = (short) random.nextInt(125);
            return value;
        });
        randoms.put(Boolean.class, () -> {
            Random random = new Random();
            Object value = random.nextBoolean();
            return value;
        });
        randoms.put(Byte.class, () -> {
            Random random = new Random();
            Object value = (byte) random.nextInt(125);
            return value;
        });
        randoms.put(Character.class, () -> {
            Random random = new Random();
            int index = random.nextInt(DEFAULT_CHARACTERS.length);
            Object value = DEFAULT_CHARACTERS[index];
            return value;
        });
        randoms.put(Double.class, () -> {
            Random random = new Random();
            Object value = (double) random.nextInt(125);
            return value;
        });
        randoms.put(Float.class, () -> {
            Random random = new Random();
            Object value = (float) random.nextInt(125);
            return value;
        });
        randoms.put(Integer.class, () -> {
            Random random = new Random();
            Object value = random.nextInt(125);
            return value;
        });
        randoms.put(Long.class, () -> {
            Random random = new Random();
            Object value = (long) random.nextInt(125);
            return value;
        });
        randoms.put(Short.class, () -> {
            Random random = new Random();
            Object value = (short) random.nextInt(125);
            return value;
        });
        randoms.put(String.class, () -> {
            // Random string of 3..DEFAULT_STRINGS.length characters.
            Random random = new Random();
            int randomLength = 3 + random.nextInt(DEFAULT_STRINGS.length - 3);
            StringBuilder builder = new StringBuilder();
            for (int i = 0; i < randomLength; ++i) {
                int index = random.nextInt(DEFAULT_STRINGS.length);
                builder.append(DEFAULT_STRINGS[index]);
            }
            return builder.toString();
        });
        randoms.put(Number.class, () -> {
            Random random = new Random();
            Object value = random.nextInt(125);
            return value;
        });
        randoms.put(BigDecimal.class, () -> {
            return new BigDecimal(10L);
        });
        randoms.put(BigInteger.class, () -> {
            return new BigInteger("10");
        });
        randoms.put(Date.class, () -> {
            // Now plus 1..5 days.
            Date now = new Date();
            long nowMilis = now.getTime();
            Random random = new Random();
            int nextDays = 1 + random.nextInt(5);
            long nextMilis = nowMilis + nextDays * 24 * 60 * 60 * 1000;
            Date nextTime = new Date(nextMilis);
            return nextTime;
        });
        randoms.put(java.sql.Date.class, () -> {
            // Now plus 1..5 days, as an SQL date.
            Date now = new Date();
            long nowMilis = now.getTime();
            Random random = new Random();
            int nextDays = 1 + random.nextInt(5);
            long nextMilis = nowMilis + nextDays * 24 * 60 * 60 * 1000;
            java.sql.Date nextTime = new java.sql.Date(nextMilis);
            return nextTime;
        });
        randoms.put(LocalDate.class, () -> {
            return LocalDate.now();
        });
        randoms.put(LocalDateTime.class, () -> {
            return LocalDateTime.now();
        });
        return randoms;
    }
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-transfer/src/main/java/com/amazonaws/services/transfer/model/transform/IdentityProviderDetailsJsonUnmarshaller.java | 3082 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.transfer.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.transfer.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * IdentityProviderDetails JSON Unmarshaller.
 *
 * Streaming unmarshaller: walks the JSON token stream, copies the "Url" and
 * "InvocationRole" fields into a new IdentityProviderDetails, and stops once
 * the parser leaves the object's nesting depth. Generated code — do not edit
 * by hand.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class IdentityProviderDetailsJsonUnmarshaller implements Unmarshaller<IdentityProviderDetails, JsonUnmarshallerContext> {

    /**
     * Reads one IdentityProviderDetails object from the context's JSON stream.
     * Returns null when the current JSON value is a literal null.
     */
    public IdentityProviderDetails unmarshall(JsonUnmarshallerContext context) throws Exception {
        IdentityProviderDetails identityProviderDetails = new IdentityProviderDetails();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live exactly one nesting level below the start.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // JSON null -> no object.
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("Url", targetDepth)) {
                    context.nextToken();
                    identityProviderDetails.setUrl(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("InvocationRole", targetDepth)) {
                    context.nextToken();
                    identityProviderDetails.setInvocationRole(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the parser has climbed back out of this object's scope.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return identityProviderDetails;
    }

    // Lazily initialized singleton (generated code is used single-threaded
    // per unmarshalling pass; the race here is benign since instances are stateless).
    private static IdentityProviderDetailsJsonUnmarshaller instance;

    public static IdentityProviderDetailsJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new IdentityProviderDetailsJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
vaclav/voicemenu | languages/jetbrains.mps.samples.VoiceMenu/source_gen/jetbrains/mps/samples/VoiceMenu/editor/HangUp_Editor.java | 489 | package jetbrains.mps.samples.VoiceMenu.editor;
/*Generated by MPS */
import jetbrains.mps.nodeEditor.DefaultNodeEditor;
import jetbrains.mps.openapi.editor.cells.EditorCell;
import jetbrains.mps.openapi.editor.EditorContext;
import org.jetbrains.mps.openapi.model.SNode;
/**
 * MPS-generated editor for the HangUp concept: delegates cell creation to the
 * generated {@code HangUp_EditorBuilder_a}. Do not edit by hand.
 */
public class HangUp_Editor extends DefaultNodeEditor {
    public EditorCell createEditorCell(EditorContext editorContext, SNode node) {
        return new HangUp_EditorBuilder_a(editorContext, node).createCell();
    }
}
| apache-2.0 |
consulo/consulo-java | java-manifest/src/main/java/org/osmorc/manifest/lang/headerparser/HeaderAnnotator.java | 2436 | /*
* Copyright (c) 2007-2009, Osmorc Development Team
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or other
* materials provided with the distribution.
* * Neither the name of 'Osmorc Development Team' nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.osmorc.manifest.lang.headerparser;
import javax.annotation.Nonnull;
import com.intellij.lang.annotation.AnnotationHolder;
import com.intellij.lang.annotation.Annotator;
import com.intellij.psi.PsiElement;
import consulo.java.manifest.lang.headerparser.HeaderUtil;
import org.osmorc.manifest.lang.psi.HeaderValuePart;
/**
* @author Robert F. Beeger (robert@beeger.net)
*/
public class HeaderAnnotator implements Annotator {
public void annotate(@Nonnull PsiElement psiElement, @Nonnull AnnotationHolder holder) {
if (psiElement instanceof HeaderValuePart) {
HeaderValuePart headerValue = (HeaderValuePart)psiElement;
HeaderParser headerParser = HeaderUtil.getHeaderParser(headerValue);
if (headerParser != null) {
headerParser.annotate(headerValue, holder);
}
}
}
}
| apache-2.0 |
Dissem/Jabit | repositories/src/main/java/ch/dissem/bitmessage/repository/JdbcNodeRegistry.java | 6054 | package ch.dissem.bitmessage.repository;
import ch.dissem.bitmessage.entity.valueobject.NetworkAddress;
import ch.dissem.bitmessage.exception.ApplicationException;
import ch.dissem.bitmessage.ports.NodeRegistry;
import ch.dissem.bitmessage.utils.Collections;
import ch.dissem.bitmessage.utils.SqlStrings;
import ch.dissem.bitmessage.utils.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static ch.dissem.bitmessage.ports.NodeRegistryHelper.loadStableNodes;
import static ch.dissem.bitmessage.utils.UnixTime.*;
/**
 * JDBC-backed {@link NodeRegistry}: persists known Bitmessage nodes in the
 * "Node" table and falls back to the built-in stable-node list when the table
 * has no entry for a requested stream. Entries older than 28 days are purged.
 */
public class JdbcNodeRegistry extends JdbcHelper implements NodeRegistry {
    private static final Logger LOG = LoggerFactory.getLogger(JdbcNodeRegistry.class);
    // Lazily loaded fallback nodes, keyed by stream number.
    private Map<Long, Set<NetworkAddress>> stableNodes;

    public JdbcNodeRegistry(JdbcConfig config) {
        super(config);
        cleanUp();
    }

    /** Deletes nodes that have not been seen for more than 28 days. */
    private void cleanUp() {
        try (
            Connection connection = config.getConnection();
            PreparedStatement ps = connection.prepareStatement(
                "DELETE FROM Node WHERE time<?")
        ) {
            ps.setLong(1, now(-28 * DAY));
            ps.executeUpdate();
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
        }
    }

    /**
     * Maps the current row of the result set to a {@link NetworkAddress}.
     * Shared by {@link #loadExisting(NetworkAddress)} and
     * {@link #getKnownAddresses(int, long...)}.
     */
    private static NetworkAddress readNode(ResultSet rs) throws SQLException {
        return new NetworkAddress.Builder()
            .stream(rs.getLong("stream"))
            .ipv6(rs.getBytes("address"))
            .port(rs.getInt("port"))
            .services(rs.getLong("services"))
            .time(rs.getLong("time"))
            .build();
    }

    /**
     * Loads the stored entry with the given node's stream, address and port,
     * or null if none exists. Uses a parameterized statement instead of
     * concatenating the hex-encoded address into the SQL text.
     */
    private NetworkAddress loadExisting(NetworkAddress node) {
        try (
            Connection connection = config.getConnection();
            PreparedStatement ps = connection.prepareStatement(
                "SELECT stream, address, port, services, time FROM Node" +
                    " WHERE stream = ? AND address = ? AND port = ?")
        ) {
            ps.setLong(1, node.getStream());
            ps.setBytes(2, node.getIPv6());
            ps.setInt(3, node.getPort());
            try (ResultSet rs = ps.executeQuery()) {
                if (rs.next()) {
                    return readNode(rs);
                } else {
                    return null;
                }
            }
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new ApplicationException(e);
        }
    }

    /**
     * Returns up to {@code limit} most recently seen nodes for the given
     * streams; when the table yields nothing, one random stable node per
     * stream is returned instead.
     */
    @Override
    public List<NetworkAddress> getKnownAddresses(int limit, long... streams) {
        List<NetworkAddress> result = new LinkedList<>();
        // The IN-list is built from caller-supplied longs only, so string
        // concatenation cannot inject arbitrary SQL here.
        String query =
            "SELECT stream, address, port, services, time" +
                " FROM Node WHERE stream IN (" + SqlStrings.join(streams) + ")" +
                " ORDER BY TIME DESC" +
                " LIMIT " + limit;
        try (
            Connection connection = config.getConnection();
            Statement stmt = connection.createStatement();
            ResultSet rs = stmt.executeQuery(query)
        ) {
            while (rs.next()) {
                result.add(readNode(rs));
            }
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new ApplicationException(e);
        }
        if (result.isEmpty()) {
            // Fall back to the bundled stable nodes, loaded once on demand.
            synchronized (this) {
                if (stableNodes == null) {
                    stableNodes = loadStableNodes();
                }
            }
            for (long stream : streams) {
                Set<NetworkAddress> nodes = stableNodes.get(stream);
                if (nodes != null && !nodes.isEmpty()) {
                    result.add(Collections.selectRandom(nodes));
                }
            }
        }
        return result;
    }

    /**
     * Stores or refreshes the offered nodes. Entries with timestamps more
     * than 2 minutes in the future or older than 28 days are ignored; an
     * existing entry is only updated when the offered timestamp is newer.
     */
    @Override
    public void offerAddresses(List<NetworkAddress> nodes) {
        cleanUp();
        nodes.stream()
            .filter(node -> node.getTime() < now(+2 * MINUTE) && node.getTime() > now(-28 * DAY))
            .forEach(node -> {
                synchronized (this) {
                    NetworkAddress existing = loadExisting(node);
                    if (existing == null) {
                        insert(node);
                    } else if (node.getTime() > existing.getTime()) {
                        update(node);
                    }
                }
            });
    }

    /** Inserts a new node row; failures are logged and swallowed (best effort). */
    private void insert(NetworkAddress node) {
        try (
            Connection connection = config.getConnection();
            PreparedStatement ps = connection.prepareStatement(
                "INSERT INTO Node (stream, address, port, services, time) " +
                    "VALUES (?, ?, ?, ?, ?)")
        ) {
            ps.setLong(1, node.getStream());
            ps.setBytes(2, node.getIPv6());
            ps.setInt(3, node.getPort());
            ps.setLong(4, node.getServices());
            ps.setLong(5, node.getTime());
            ps.executeUpdate();
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
        }
    }

    /** Refreshes services/time of an existing row; failures are logged and swallowed. */
    private void update(NetworkAddress node) {
        try (
            Connection connection = config.getConnection();
            PreparedStatement ps = connection.prepareStatement(
                "UPDATE Node SET services=?, time=? WHERE stream=? AND address=? AND port=?")
        ) {
            ps.setLong(1, node.getServices());
            ps.setLong(2, node.getTime());
            ps.setLong(3, node.getStream());
            ps.setBytes(4, node.getIPv6());
            ps.setInt(5, node.getPort());
            ps.executeUpdate();
        } catch (SQLException e) {
            LOG.error(e.getMessage(), e);
        }
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-opsworkscm/src/main/java/com/amazonaws/services/opsworkscm/AWSOpsWorksCMAsync.java | 56942 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.opsworkscm;
import javax.annotation.Generated;
import com.amazonaws.services.opsworkscm.model.*;
/**
* Interface for accessing OpsWorksCM asynchronously. Each asynchronous method will return a Java Future object
* representing the asynchronous operation; overloads which accept an {@code AsyncHandler} can be used to receive
* notification when an asynchronous operation completes.
* <p>
* <b>Note:</b> Do not directly implement this interface, new methods are added to it regularly. Extend from
* {@link com.amazonaws.services.opsworkscm.AbstractAWSOpsWorksCMAsync} instead.
* </p>
* <p>
* <fullname>AWS OpsWorks CM</fullname>
* <p>
* AWS OpsWorks for configuration management (CM) is a service that runs and manages configuration management servers.
* You can use AWS OpsWorks CM to create and manage AWS OpsWorks for Chef Automate and AWS OpsWorks for Puppet
* Enterprise servers, and add or remove nodes for the servers to manage.
* </p>
* <p>
* <b>Glossary of terms</b>
* </p>
* <ul>
* <li>
* <p>
* <b>Server</b>: A configuration management server that can be highly-available. The configuration management server
* runs on an Amazon Elastic Compute Cloud (EC2) instance, and may use various other AWS services, such as Amazon
* Relational Database Service (RDS) and Elastic Load Balancing. A server is a generic abstraction over the
* configuration manager that you want to use, much like Amazon RDS. In AWS OpsWorks CM, you do not start or stop
* servers. After you create servers, they continue to run until they are deleted.
* </p>
* </li>
* <li>
* <p>
* <b>Engine</b>: The engine is the specific configuration manager that you want to use. Valid values in this release
* include <code>ChefAutomate</code> and <code>Puppet</code>.
* </p>
* </li>
* <li>
* <p>
* <b>Backup</b>: This is an application-level backup of the data that the configuration manager stores. AWS OpsWorks CM
* creates an S3 bucket for backups when you launch the first server. A backup maintains a snapshot of a server's
* configuration-related attributes at the time the backup starts.
* </p>
* </li>
* <li>
* <p>
* <b>Events</b>: Events are always related to a server. Events are written during server creation, when health checks
* run, when backups are created, when system maintenance is performed, etc. When you delete a server, the server's
* events are also deleted.
* </p>
* </li>
* <li>
* <p>
* <b>Account attributes</b>: Every account has attributes that are assigned in the AWS OpsWorks CM database. These
* attributes store information about configuration limits (servers, backups, etc.) and your customer account.
* </p>
* </li>
* </ul>
* <p>
* <b>Endpoints</b>
* </p>
* <p>
* AWS OpsWorks CM supports the following endpoints, all HTTPS. You must connect to one of the following endpoints. Your
* servers can only be accessed or managed within the endpoint in which they are created.
* </p>
* <ul>
* <li>
* <p>
* opsworks-cm.us-east-1.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.us-east-2.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.us-west-1.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.us-west-2.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.ap-northeast-1.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.ap-southeast-1.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.ap-southeast-2.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.eu-central-1.amazonaws.com
* </p>
* </li>
* <li>
* <p>
* opsworks-cm.eu-west-1.amazonaws.com
* </p>
* </li>
* </ul>
* <p>
* For more information, see <a href="https://docs.aws.amazon.com/general/latest/gr/opsworks-service.html">AWS OpsWorks
* endpoints and quotas</a> in the AWS General Reference.
* </p>
* <p>
* <b>Throttling limits</b>
* </p>
* <p>
* All API operations allow for five requests per second with a burst of 10 requests per second.
* </p>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public interface AWSOpsWorksCMAsync extends AWSOpsWorksCM {
    /**
     * <p>
     * Associates a new node with the server. For more information about how to disassociate a node, see
     * <a>DisassociateNode</a>.
     * </p>
     * <p>
     * On a Chef server: This command is an alternative to <code>knife bootstrap</code>.
     * </p>
     * <p>
     * Example (Chef):
     * <code>aws opsworks-cm associate-node --server-name <i>MyServer</i> --node-name <i>MyManagedNode</i> --engine-attributes "Name=<i>CHEF_ORGANIZATION</i>,Value=default" "Name=<i>CHEF_NODE_PUBLIC_KEY</i>,Value=<i>public-key-pem</i>"</code>
     * </p>
     * <p>
     * On a Puppet server, this command is an alternative to the <code>puppet cert sign</code> command that signs a
     * Puppet node CSR.
     * </p>
     * <p>
     * Example (Puppet):
     * <code>aws opsworks-cm associate-node --server-name <i>MyServer</i> --node-name <i>MyManagedNode</i> --engine-attributes "Name=<i>PUPPET_NODE_CSR</i>,Value=<i>csr-pem</i>"</code>
     * </p>
     * <p>
     * A node can only be associated with servers that are in a <code>HEALTHY</code> state. Otherwise, an
     * <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the server
     * does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid. The
     * AssociateNode API call can be integrated into Auto Scaling configurations, AWS CloudFormation templates, or the
     * user data of a server's instance.
     * </p>
     *
     * @param associateNodeRequest
     * @return A Java Future containing the result of the AssociateNode operation returned by the service.
     * @sample AWSOpsWorksCMAsync.AssociateNode
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/AssociateNode" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<AssociateNodeResult> associateNodeAsync(AssociateNodeRequest associateNodeRequest);
    /**
     * <p>
     * Associates a new node with the server. For more information about how to disassociate a node, see
     * <a>DisassociateNode</a>.
     * </p>
     * <p>
     * On a Chef server: This command is an alternative to <code>knife bootstrap</code>.
     * </p>
     * <p>
     * Example (Chef):
     * <code>aws opsworks-cm associate-node --server-name <i>MyServer</i> --node-name <i>MyManagedNode</i> --engine-attributes "Name=<i>CHEF_ORGANIZATION</i>,Value=default" "Name=<i>CHEF_NODE_PUBLIC_KEY</i>,Value=<i>public-key-pem</i>"</code>
     * </p>
     * <p>
     * On a Puppet server, this command is an alternative to the <code>puppet cert sign</code> command that signs a
     * Puppet node CSR.
     * </p>
     * <p>
     * Example (Puppet):
     * <code>aws opsworks-cm associate-node --server-name <i>MyServer</i> --node-name <i>MyManagedNode</i> --engine-attributes "Name=<i>PUPPET_NODE_CSR</i>,Value=<i>csr-pem</i>"</code>
     * </p>
     * <p>
     * A node can only be associated with servers that are in a <code>HEALTHY</code> state. Otherwise, an
     * <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the server
     * does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid. The
     * AssociateNode API call can be integrated into Auto Scaling configurations, AWS CloudFormation templates, or the
     * user data of a server's instance.
     * </p>
     *
     * @param associateNodeRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the AssociateNode operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.AssociateNode
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/AssociateNode" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<AssociateNodeResult> associateNodeAsync(AssociateNodeRequest associateNodeRequest,
            com.amazonaws.handlers.AsyncHandler<AssociateNodeRequest, AssociateNodeResult> asyncHandler);
    /**
     * <p>
     * Creates an application-level backup of a server. While the server is in the <code>BACKING_UP</code> state, the
     * server cannot be changed, and no additional backup can be created.
     * </p>
     * <p>
     * Backups can be created for servers in <code>RUNNING</code>, <code>HEALTHY</code>, and <code>UNHEALTHY</code>
     * states. By default, you can create a maximum of 50 manual backups.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * A <code>LimitExceededException</code> is thrown when the maximum number of manual backups is reached. An
     * <code>InvalidStateException</code> is thrown when the server is not in any of the following states: RUNNING,
     * HEALTHY, or UNHEALTHY. A <code>ResourceNotFoundException</code> is thrown when the server is not found. A
     * <code>ValidationException</code> is thrown when parameters of the request are not valid.
     * </p>
     *
     * @param createBackupRequest
     * @return A Java Future containing the result of the CreateBackup operation returned by the service.
     * @sample AWSOpsWorksCMAsync.CreateBackup
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/CreateBackup" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<CreateBackupResult> createBackupAsync(CreateBackupRequest createBackupRequest);
    /**
     * <p>
     * Creates an application-level backup of a server. While the server is in the <code>BACKING_UP</code> state, the
     * server cannot be changed, and no additional backup can be created.
     * </p>
     * <p>
     * Backups can be created for servers in <code>RUNNING</code>, <code>HEALTHY</code>, and <code>UNHEALTHY</code>
     * states. By default, you can create a maximum of 50 manual backups.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * A <code>LimitExceededException</code> is thrown when the maximum number of manual backups is reached. An
     * <code>InvalidStateException</code> is thrown when the server is not in any of the following states: RUNNING,
     * HEALTHY, or UNHEALTHY. A <code>ResourceNotFoundException</code> is thrown when the server is not found. A
     * <code>ValidationException</code> is thrown when parameters of the request are not valid.
     * </p>
     *
     * @param createBackupRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the CreateBackup operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.CreateBackup
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/CreateBackup" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<CreateBackupResult> createBackupAsync(CreateBackupRequest createBackupRequest,
            com.amazonaws.handlers.AsyncHandler<CreateBackupRequest, CreateBackupResult> asyncHandler);
    /**
     * <p>
     * Creates and immediately starts a new server. The server is ready to use when it is in the <code>HEALTHY</code>
     * state. By default, you can create a maximum of 10 servers.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * A <code>LimitExceededException</code> is thrown when you have created the maximum number of servers (10). A
     * <code>ResourceAlreadyExistsException</code> is thrown when a server with the same name already exists in the
     * account. A <code>ResourceNotFoundException</code> is thrown when you specify a backup ID that is not valid or is
     * for a backup that does not exist. A <code>ValidationException</code> is thrown when parameters of the request are
     * not valid.
     * </p>
     * <p>
     * If you do not specify a security group by adding the <code>SecurityGroupIds</code> parameter, AWS OpsWorks
     * creates a new security group.
     * </p>
     * <p>
     * <i>Chef Automate:</i> The default security group opens the Chef server to the world on TCP port 443. If a KeyName
     * is present, AWS OpsWorks enables SSH access. SSH is also open to the world on TCP port 22.
     * </p>
     * <p>
     * <i>Puppet Enterprise:</i> The default security group opens TCP ports 22, 443, 4433, 8140, 8142, 8143, and 8170.
     * If a KeyName is present, AWS OpsWorks enables SSH access. SSH is also open to the world on TCP port 22.
     * </p>
     * <p>
     * By default, your server is accessible from any IP address. We recommend that you update your security group rules
     * to allow access from known IP addresses and address ranges only. To edit security group rules, open Security
     * Groups in the navigation pane of the EC2 management console.
     * </p>
     * <p>
     * To specify your own domain for a server, and provide your own self-signed or CA-signed certificate and private
     * key, specify values for <code>CustomDomain</code>, <code>CustomCertificate</code>, and
     * <code>CustomPrivateKey</code>.
     * </p>
     *
     * @param createServerRequest
     * @return A Java Future containing the result of the CreateServer operation returned by the service.
     * @sample AWSOpsWorksCMAsync.CreateServer
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/CreateServer" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<CreateServerResult> createServerAsync(CreateServerRequest createServerRequest);
    /**
     * <p>
     * Creates and immediately starts a new server. The server is ready to use when it is in the <code>HEALTHY</code>
     * state. By default, you can create a maximum of 10 servers.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * A <code>LimitExceededException</code> is thrown when you have created the maximum number of servers (10). A
     * <code>ResourceAlreadyExistsException</code> is thrown when a server with the same name already exists in the
     * account. A <code>ResourceNotFoundException</code> is thrown when you specify a backup ID that is not valid or is
     * for a backup that does not exist. A <code>ValidationException</code> is thrown when parameters of the request are
     * not valid.
     * </p>
     * <p>
     * If you do not specify a security group by adding the <code>SecurityGroupIds</code> parameter, AWS OpsWorks
     * creates a new security group.
     * </p>
     * <p>
     * <i>Chef Automate:</i> The default security group opens the Chef server to the world on TCP port 443. If a KeyName
     * is present, AWS OpsWorks enables SSH access. SSH is also open to the world on TCP port 22.
     * </p>
     * <p>
     * <i>Puppet Enterprise:</i> The default security group opens TCP ports 22, 443, 4433, 8140, 8142, 8143, and 8170.
     * If a KeyName is present, AWS OpsWorks enables SSH access. SSH is also open to the world on TCP port 22.
     * </p>
     * <p>
     * By default, your server is accessible from any IP address. We recommend that you update your security group rules
     * to allow access from known IP addresses and address ranges only. To edit security group rules, open Security
     * Groups in the navigation pane of the EC2 management console.
     * </p>
     * <p>
     * To specify your own domain for a server, and provide your own self-signed or CA-signed certificate and private
     * key, specify values for <code>CustomDomain</code>, <code>CustomCertificate</code>, and
     * <code>CustomPrivateKey</code>.
     * </p>
     *
     * @param createServerRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the CreateServer operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.CreateServer
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/CreateServer" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<CreateServerResult> createServerAsync(CreateServerRequest createServerRequest,
            com.amazonaws.handlers.AsyncHandler<CreateServerRequest, CreateServerResult> asyncHandler);
    /**
     * <p>
     * Deletes a backup. You can delete both manual and automated backups. This operation is asynchronous.
     * </p>
     * <p>
     * An <code>InvalidStateException</code> is thrown when a backup deletion is already in progress. A
     * <code>ResourceNotFoundException</code> is thrown when the backup does not exist. A
     * <code>ValidationException</code> is thrown when parameters of the request are not valid.
     * </p>
     *
     * @param deleteBackupRequest
     * @return A Java Future containing the result of the DeleteBackup operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DeleteBackup
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DeleteBackup" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DeleteBackupResult> deleteBackupAsync(DeleteBackupRequest deleteBackupRequest);
    /**
     * <p>
     * Deletes a backup. You can delete both manual and automated backups. This operation is asynchronous.
     * </p>
     * <p>
     * An <code>InvalidStateException</code> is thrown when a backup deletion is already in progress. A
     * <code>ResourceNotFoundException</code> is thrown when the backup does not exist. A
     * <code>ValidationException</code> is thrown when parameters of the request are not valid.
     * </p>
     *
     * @param deleteBackupRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DeleteBackup operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DeleteBackup
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DeleteBackup" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DeleteBackupResult> deleteBackupAsync(DeleteBackupRequest deleteBackupRequest,
            com.amazonaws.handlers.AsyncHandler<DeleteBackupRequest, DeleteBackupResult> asyncHandler);
    /**
     * <p>
     * Deletes the server and the underlying AWS CloudFormation stacks (including the server's EC2 instance). When you
     * run this command, the server state is updated to <code>DELETING</code>. After the server is deleted, it is no
     * longer returned by <code>DescribeServer</code> requests. If the AWS CloudFormation stack cannot be deleted, the
     * server cannot be deleted.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * An <code>InvalidStateException</code> is thrown when a server deletion is already in progress. A
     * <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param deleteServerRequest
     * @return A Java Future containing the result of the DeleteServer operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DeleteServer
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DeleteServer" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DeleteServerResult> deleteServerAsync(DeleteServerRequest deleteServerRequest);
    /**
     * <p>
     * Deletes the server and the underlying AWS CloudFormation stacks (including the server's EC2 instance). When you
     * run this command, the server state is updated to <code>DELETING</code>. After the server is deleted, it is no
     * longer returned by <code>DescribeServer</code> requests. If the AWS CloudFormation stack cannot be deleted, the
     * server cannot be deleted.
     * </p>
     * <p>
     * This operation is asynchronous.
     * </p>
     * <p>
     * An <code>InvalidStateException</code> is thrown when a server deletion is already in progress. A
     * <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param deleteServerRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DeleteServer operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DeleteServer
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DeleteServer" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DeleteServerResult> deleteServerAsync(DeleteServerRequest deleteServerRequest,
            com.amazonaws.handlers.AsyncHandler<DeleteServerRequest, DeleteServerResult> asyncHandler);
    /**
     * <p>
     * Describes your OpsWorks-CM account attributes.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     *
     * @param describeAccountAttributesRequest
     * @return A Java Future containing the result of the DescribeAccountAttributes operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DescribeAccountAttributes
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeAccountAttributes"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync(
            DescribeAccountAttributesRequest describeAccountAttributesRequest);
    /**
     * <p>
     * Describes your OpsWorks-CM account attributes.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     *
     * @param describeAccountAttributesRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DescribeAccountAttributes operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DescribeAccountAttributes
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeAccountAttributes"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<DescribeAccountAttributesResult> describeAccountAttributesAsync(
            DescribeAccountAttributesRequest describeAccountAttributesRequest,
            com.amazonaws.handlers.AsyncHandler<DescribeAccountAttributesRequest, DescribeAccountAttributesResult> asyncHandler);
    /**
     * <p>
     * Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId
     * or ServerName, the command returns all backups.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the backup does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeBackupsRequest
     * @return A Java Future containing the result of the DescribeBackups operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DescribeBackups
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeBackups" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeBackupsResult> describeBackupsAsync(DescribeBackupsRequest describeBackupsRequest);
    /**
     * <p>
     * Describes backups. The results are ordered by time, with newest backups first. If you do not specify a BackupId
     * or ServerName, the command returns all backups.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the backup does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeBackupsRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DescribeBackups operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DescribeBackups
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeBackups" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeBackupsResult> describeBackupsAsync(DescribeBackupsRequest describeBackupsRequest,
            com.amazonaws.handlers.AsyncHandler<DescribeBackupsRequest, DescribeBackupsResult> asyncHandler);
    /**
     * <p>
     * Describes events for a specified server. Results are ordered by time, with newest events first.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeEventsRequest
     * @return A Java Future containing the result of the DescribeEvents operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DescribeEvents
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeEvents" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync(DescribeEventsRequest describeEventsRequest);
    /**
     * <p>
     * Describes events for a specified server. Results are ordered by time, with newest events first.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeEventsRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DescribeEvents operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DescribeEvents
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeEvents" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeEventsResult> describeEventsAsync(DescribeEventsRequest describeEventsRequest,
            com.amazonaws.handlers.AsyncHandler<DescribeEventsRequest, DescribeEventsResult> asyncHandler);
    /**
     * <p>
     * Returns the current status of an existing association or disassociation request.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when no recent association or disassociation request with the
     * specified token is found, or when the server does not exist. A <code>ValidationException</code> is raised when
     * parameters of the request are not valid.
     * </p>
     *
     * @param describeNodeAssociationStatusRequest
     * @return A Java Future containing the result of the DescribeNodeAssociationStatus operation returned by the
     *         service.
     * @sample AWSOpsWorksCMAsync.DescribeNodeAssociationStatus
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeNodeAssociationStatus"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<DescribeNodeAssociationStatusResult> describeNodeAssociationStatusAsync(
            DescribeNodeAssociationStatusRequest describeNodeAssociationStatusRequest);
    /**
     * <p>
     * Returns the current status of an existing association or disassociation request.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when no recent association or disassociation request with the
     * specified token is found, or when the server does not exist. A <code>ValidationException</code> is raised when
     * parameters of the request are not valid.
     * </p>
     *
     * @param describeNodeAssociationStatusRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DescribeNodeAssociationStatus operation returned by the
     *         service.
     * @sample AWSOpsWorksCMAsyncHandler.DescribeNodeAssociationStatus
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeNodeAssociationStatus"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<DescribeNodeAssociationStatusResult> describeNodeAssociationStatusAsync(
            DescribeNodeAssociationStatusRequest describeNodeAssociationStatusRequest,
            com.amazonaws.handlers.AsyncHandler<DescribeNodeAssociationStatusRequest, DescribeNodeAssociationStatusResult> asyncHandler);
    /**
     * <p>
     * Lists all configuration management servers that are identified with your account. Only the stored results from
     * Amazon DynamoDB are returned. AWS OpsWorks CM does not query other services.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeServersRequest
     * @return A Java Future containing the result of the DescribeServers operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DescribeServers
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeServers" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeServersResult> describeServersAsync(DescribeServersRequest describeServersRequest);
    /**
     * <p>
     * Lists all configuration management servers that are identified with your account. Only the stored results from
     * Amazon DynamoDB are returned. AWS OpsWorks CM does not query other services.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
     * <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param describeServersRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DescribeServers operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DescribeServers
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DescribeServers" target="_top">AWS API
     *      Documentation</a>
     */
    java.util.concurrent.Future<DescribeServersResult> describeServersAsync(DescribeServersRequest describeServersRequest,
            com.amazonaws.handlers.AsyncHandler<DescribeServersRequest, DescribeServersResult> asyncHandler);
    /**
     * <p>
     * Disassociates a node from an AWS OpsWorks CM server, and removes the node from the server's managed nodes. After
     * a node is disassociated, the node key pair is no longer valid for accessing the configuration manager's API. For
     * more information about how to associate a node, see <a>AssociateNode</a>.
     * </p>
     * <p>
     * A node can only be disassociated from a server that is in a <code>HEALTHY</code> state. Otherwise, an
     * <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the server
     * does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param disassociateNodeRequest
     * @return A Java Future containing the result of the DisassociateNode operation returned by the service.
     * @sample AWSOpsWorksCMAsync.DisassociateNode
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DisassociateNode" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<DisassociateNodeResult> disassociateNodeAsync(DisassociateNodeRequest disassociateNodeRequest);
    /**
     * <p>
     * Disassociates a node from an AWS OpsWorks CM server, and removes the node from the server's managed nodes. After
     * a node is disassociated, the node key pair is no longer valid for accessing the configuration manager's API. For
     * more information about how to associate a node, see <a>AssociateNode</a>.
     * </p>
     * <p>
     * A node can only be disassociated from a server that is in a <code>HEALTHY</code> state. Otherwise, an
     * <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the server
     * does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid.
     * </p>
     *
     * @param disassociateNodeRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the DisassociateNode operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.DisassociateNode
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/DisassociateNode" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<DisassociateNodeResult> disassociateNodeAsync(DisassociateNodeRequest disassociateNodeRequest,
            com.amazonaws.handlers.AsyncHandler<DisassociateNodeRequest, DisassociateNodeResult> asyncHandler);
    /**
     * <p>
     * Exports a specified server engine attribute as a base64-encoded string. For example, you can export user data
     * that you can use in EC2 to associate nodes with a server.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ValidationException</code> is raised when parameters of the request are not valid. A
     * <code>ResourceNotFoundException</code> is thrown when the server does not exist. An
     * <code>InvalidStateException</code> is thrown when the server is in any of the following states: CREATING,
     * TERMINATED, FAILED or DELETING.
     * </p>
     *
     * @param exportServerEngineAttributeRequest
     * @return A Java Future containing the result of the ExportServerEngineAttribute operation returned by the service.
     * @sample AWSOpsWorksCMAsync.ExportServerEngineAttribute
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/ExportServerEngineAttribute"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<ExportServerEngineAttributeResult> exportServerEngineAttributeAsync(
            ExportServerEngineAttributeRequest exportServerEngineAttributeRequest);
    /**
     * <p>
     * Exports a specified server engine attribute as a base64-encoded string. For example, you can export user data
     * that you can use in EC2 to associate nodes with a server.
     * </p>
     * <p>
     * This operation is synchronous.
     * </p>
     * <p>
     * A <code>ValidationException</code> is raised when parameters of the request are not valid. A
     * <code>ResourceNotFoundException</code> is thrown when the server does not exist. An
     * <code>InvalidStateException</code> is thrown when the server is in any of the following states: CREATING,
     * TERMINATED, FAILED or DELETING.
     * </p>
     *
     * @param exportServerEngineAttributeRequest
     * @param asyncHandler
     *        Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
     *        implementation of the callback methods in this interface to receive notification of successful or
     *        unsuccessful completion of the operation.
     * @return A Java Future containing the result of the ExportServerEngineAttribute operation returned by the service.
     * @sample AWSOpsWorksCMAsyncHandler.ExportServerEngineAttribute
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/ExportServerEngineAttribute"
     *      target="_top">AWS API Documentation</a>
     */
    java.util.concurrent.Future<ExportServerEngineAttributeResult> exportServerEngineAttributeAsync(
            ExportServerEngineAttributeRequest exportServerEngineAttributeRequest,
            com.amazonaws.handlers.AsyncHandler<ExportServerEngineAttributeRequest, ExportServerEngineAttributeResult> asyncHandler);
    /**
     * <p>
     * Returns a list of tags that are applied to the specified AWS OpsWorks for Chef Automate or AWS OpsWorks for
     * Puppet Enterprise servers or backups.
     * </p>
     *
     * @param listTagsForResourceRequest
     * @return A Java Future containing the result of the ListTagsForResource operation returned by the service.
     * @sample AWSOpsWorksCMAsync.ListTagsForResource
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/ListTagsForResource" target="_top">AWS
     *      API Documentation</a>
     */
    java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest);
/**
* <p>
* Returns a list of tags that are applied to the specified AWS OpsWorks for Chef Automate or AWS OpsWorks for
* Puppet Enterprise servers or backups.
* </p>
*
* @param listTagsForResourceRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the ListTagsForResource operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.ListTagsForResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/ListTagsForResource" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest listTagsForResourceRequest,
com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler);
/**
* <p>
* Restores a backup to a server that is in a <code>CONNECTION_LOST</code>, <code>HEALTHY</code>,
* <code>RUNNING</code>, <code>UNHEALTHY</code>, or <code>TERMINATED</code> state. When you run RestoreServer, the
* server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing
* server endpoint, so configuration management of the server's client devices (nodes) should continue to work.
* </p>
* <p>
* Restoring from a backup is performed by creating a new EC2 instance. If restoration is successful, and the server
* is in a <code>HEALTHY</code> state, AWS OpsWorks CM switches traffic over to the new instance. After restoration
* is finished, the old EC2 instance is maintained in a <code>Running</code> or <code>Stopped</code> state, but is
* eventually terminated.
* </p>
* <p>
* This operation is asynchronous.
* </p>
* <p>
* An <code>InvalidStateException</code> is thrown when the server is not in a valid state. A
* <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
* <code>ValidationException</code> is raised when parameters of the request are not valid.
* </p>
*
* @param restoreServerRequest
* @return A Java Future containing the result of the RestoreServer operation returned by the service.
* @sample AWSOpsWorksCMAsync.RestoreServer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/RestoreServer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<RestoreServerResult> restoreServerAsync(RestoreServerRequest restoreServerRequest);
/**
* <p>
* Restores a backup to a server that is in a <code>CONNECTION_LOST</code>, <code>HEALTHY</code>,
* <code>RUNNING</code>, <code>UNHEALTHY</code>, or <code>TERMINATED</code> state. When you run RestoreServer, the
* server's EC2 instance is deleted, and a new EC2 instance is configured. RestoreServer maintains the existing
* server endpoint, so configuration management of the server's client devices (nodes) should continue to work.
* </p>
* <p>
* Restoring from a backup is performed by creating a new EC2 instance. If restoration is successful, and the server
* is in a <code>HEALTHY</code> state, AWS OpsWorks CM switches traffic over to the new instance. After restoration
* is finished, the old EC2 instance is maintained in a <code>Running</code> or <code>Stopped</code> state, but is
* eventually terminated.
* </p>
* <p>
* This operation is asynchronous.
* </p>
* <p>
* An <code>InvalidStateException</code> is thrown when the server is not in a valid state. A
* <code>ResourceNotFoundException</code> is thrown when the server does not exist. A
* <code>ValidationException</code> is raised when parameters of the request are not valid.
* </p>
*
* @param restoreServerRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the RestoreServer operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.RestoreServer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/RestoreServer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<RestoreServerResult> restoreServerAsync(RestoreServerRequest restoreServerRequest,
com.amazonaws.handlers.AsyncHandler<RestoreServerRequest, RestoreServerResult> asyncHandler);
/**
* <p>
* Manually starts server maintenance. This command can be useful if an earlier maintenance attempt failed, and the
* underlying cause of maintenance failure has been resolved. The server is in an <code>UNDER_MAINTENANCE</code>
* state while maintenance is in progress.
* </p>
* <p>
* Maintenance can only be started on servers in <code>HEALTHY</code> and <code>UNHEALTHY</code> states. Otherwise,
* an <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the
* server does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid.
* </p>
*
* @param startMaintenanceRequest
* @return A Java Future containing the result of the StartMaintenance operation returned by the service.
* @sample AWSOpsWorksCMAsync.StartMaintenance
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/StartMaintenance" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<StartMaintenanceResult> startMaintenanceAsync(StartMaintenanceRequest startMaintenanceRequest);
/**
* <p>
* Manually starts server maintenance. This command can be useful if an earlier maintenance attempt failed, and the
* underlying cause of maintenance failure has been resolved. The server is in an <code>UNDER_MAINTENANCE</code>
* state while maintenance is in progress.
* </p>
* <p>
* Maintenance can only be started on servers in <code>HEALTHY</code> and <code>UNHEALTHY</code> states. Otherwise,
* an <code>InvalidStateException</code> is thrown. A <code>ResourceNotFoundException</code> is thrown when the
* server does not exist. A <code>ValidationException</code> is raised when parameters of the request are not valid.
* </p>
*
* @param startMaintenanceRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the StartMaintenance operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.StartMaintenance
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/StartMaintenance" target="_top">AWS
* API Documentation</a>
*/
java.util.concurrent.Future<StartMaintenanceResult> startMaintenanceAsync(StartMaintenanceRequest startMaintenanceRequest,
com.amazonaws.handlers.AsyncHandler<StartMaintenanceRequest, StartMaintenanceResult> asyncHandler);
/**
* <p>
* Applies tags to an AWS OpsWorks for Chef Automate or AWS OpsWorks for Puppet Enterprise server, or to server
* backups.
* </p>
*
* @param tagResourceRequest
* @return A Java Future containing the result of the TagResource operation returned by the service.
* @sample AWSOpsWorksCMAsync.TagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/TagResource" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest);
/**
* <p>
* Applies tags to an AWS OpsWorks for Chef Automate or AWS OpsWorks for Puppet Enterprise server, or to server
* backups.
* </p>
*
* @param tagResourceRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the TagResource operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.TagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/TagResource" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest tagResourceRequest,
com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler);
/**
* <p>
* Removes specified tags from an AWS OpsWorks-CM server or backup.
* </p>
*
* @param untagResourceRequest
* @return A Java Future containing the result of the UntagResource operation returned by the service.
* @sample AWSOpsWorksCMAsync.UntagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UntagResource" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest);
/**
* <p>
* Removes specified tags from an AWS OpsWorks-CM server or backup.
* </p>
*
* @param untagResourceRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the UntagResource operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.UntagResource
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UntagResource" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest untagResourceRequest,
com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler);
/**
* <p>
* Updates settings for a server.
* </p>
* <p>
* This operation is synchronous.
* </p>
*
* @param updateServerRequest
* @return A Java Future containing the result of the UpdateServer operation returned by the service.
* @sample AWSOpsWorksCMAsync.UpdateServer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UpdateServer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<UpdateServerResult> updateServerAsync(UpdateServerRequest updateServerRequest);
/**
* <p>
* Updates settings for a server.
* </p>
* <p>
* This operation is synchronous.
* </p>
*
* @param updateServerRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the UpdateServer operation returned by the service.
* @sample AWSOpsWorksCMAsyncHandler.UpdateServer
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UpdateServer" target="_top">AWS API
* Documentation</a>
*/
java.util.concurrent.Future<UpdateServerResult> updateServerAsync(UpdateServerRequest updateServerRequest,
com.amazonaws.handlers.AsyncHandler<UpdateServerRequest, UpdateServerResult> asyncHandler);
/**
* <p>
* Updates engine-specific attributes on a specified server. The server enters the <code>MODIFYING</code> state when
* this operation is in progress. Only one update can occur at a time. You can use this command to reset a Chef
* server's public key (<code>CHEF_PIVOTAL_KEY</code>) or a Puppet server's admin password (
* <code>PUPPET_ADMIN_PASSWORD</code>).
* </p>
* <p>
* This operation is asynchronous.
* </p>
* <p>
* This operation can only be called for servers in <code>HEALTHY</code> or <code>UNHEALTHY</code> states.
* Otherwise, an <code>InvalidStateException</code> is raised. A <code>ResourceNotFoundException</code> is thrown
* when the server does not exist. A <code>ValidationException</code> is raised when parameters of the request are
* not valid.
* </p>
*
* @param updateServerEngineAttributesRequest
* @return A Java Future containing the result of the UpdateServerEngineAttributes operation returned by the
* service.
* @sample AWSOpsWorksCMAsync.UpdateServerEngineAttributes
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UpdateServerEngineAttributes"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<UpdateServerEngineAttributesResult> updateServerEngineAttributesAsync(
UpdateServerEngineAttributesRequest updateServerEngineAttributesRequest);
/**
* <p>
* Updates engine-specific attributes on a specified server. The server enters the <code>MODIFYING</code> state when
* this operation is in progress. Only one update can occur at a time. You can use this command to reset a Chef
* server's public key (<code>CHEF_PIVOTAL_KEY</code>) or a Puppet server's admin password (
* <code>PUPPET_ADMIN_PASSWORD</code>).
* </p>
* <p>
* This operation is asynchronous.
* </p>
* <p>
* This operation can only be called for servers in <code>HEALTHY</code> or <code>UNHEALTHY</code> states.
* Otherwise, an <code>InvalidStateException</code> is raised. A <code>ResourceNotFoundException</code> is thrown
* when the server does not exist. A <code>ValidationException</code> is raised when parameters of the request are
* not valid.
* </p>
*
* @param updateServerEngineAttributesRequest
* @param asyncHandler
* Asynchronous callback handler for events in the lifecycle of the request. Users can provide an
* implementation of the callback methods in this interface to receive notification of successful or
* unsuccessful completion of the operation.
* @return A Java Future containing the result of the UpdateServerEngineAttributes operation returned by the
* service.
* @sample AWSOpsWorksCMAsyncHandler.UpdateServerEngineAttributes
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/opsworkscm-2016-11-01/UpdateServerEngineAttributes"
* target="_top">AWS API Documentation</a>
*/
java.util.concurrent.Future<UpdateServerEngineAttributesResult> updateServerEngineAttributesAsync(
UpdateServerEngineAttributesRequest updateServerEngineAttributesRequest,
com.amazonaws.handlers.AsyncHandler<UpdateServerEngineAttributesRequest, UpdateServerEngineAttributesResult> asyncHandler);
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-forecast/src/main/java/com/amazonaws/services/forecast/model/transform/MetricsJsonUnmarshaller.java | 3538 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.forecast.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.forecast.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * Metrics JSON Unmarshaller.
 *
 * Generated unmarshaller that walks the Jackson token stream and populates a
 * {@link Metrics} instance from the fields found at the current nesting depth.
 * Do not edit by hand; regenerated by the AWS SDK code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class MetricsJsonUnmarshaller implements Unmarshaller<Metrics, JsonUnmarshallerContext> {

    /**
     * Reads one Metrics object from the context's JSON stream.
     *
     * @param context shared unmarshalling context positioned at (or just
     *        before) the object to read
     * @return the populated {@link Metrics}, or {@code null} if the current
     *         JSON value is an explicit null
     * @throws Exception propagated from the underlying Jackson parser
     */
    public Metrics unmarshall(JsonUnmarshallerContext context) throws Exception {
        Metrics metrics = new Metrics();
        // Remember where we started so we know when we have left this object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live exactly one level below the start depth.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole object maps to a null Metrics.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // testExpression matches both field name and nesting depth, so
                // same-named fields in nested objects are not picked up here.
                if (context.testExpression("RMSE", targetDepth)) {
                    context.nextToken();
                    metrics.setRMSE(context.getUnmarshaller(Double.class).unmarshall(context));
                }
                if (context.testExpression("WeightedQuantileLosses", targetDepth)) {
                    context.nextToken();
                    metrics.setWeightedQuantileLosses(new ListUnmarshaller<WeightedQuantileLoss>(WeightedQuantileLossJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("ErrorMetrics", targetDepth)) {
                    context.nextToken();
                    metrics.setErrorMetrics(new ListUnmarshaller<ErrorMetric>(ErrorMetricJsonUnmarshaller.getInstance())

                    .unmarshall(context));
                }
                if (context.testExpression("AverageWeightedQuantileLoss", targetDepth)) {
                    context.nextToken();
                    metrics.setAverageWeightedQuantileLoss(context.getUnmarshaller(Double.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the stream climbs back to (or above) the depth at
                // which this object began, i.e. the object has been consumed.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return metrics;
    }

    private static MetricsJsonUnmarshaller instance;

    /**
     * Returns the shared singleton instance. Lazy initialization is not
     * synchronized; the class is stateless, so a duplicate instance created
     * under a race is harmless (generated-code convention).
     */
    public static MetricsJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new MetricsJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
ilivoo/ilivoo | memcached/src/main/java/com/ilivoo/memcached/MemcachedManager.java | 1923 | package com.ilivoo.memcached;
import java.net.InetSocketAddress;
import com.thimbleware.jmemcached.CacheImpl;
import com.thimbleware.jmemcached.Key;
import com.thimbleware.jmemcached.LocalCacheElement;
import com.thimbleware.jmemcached.MemCacheDaemon;
import com.thimbleware.jmemcached.storage.CacheStorage;
import com.thimbleware.jmemcached.storage.bytebuffer.BlockStorageCacheStorage;
import com.thimbleware.jmemcached.storage.bytebuffer.BlockStoreFactory;
import com.thimbleware.jmemcached.storage.bytebuffer.ByteBufferBlockStore;
import com.thimbleware.jmemcached.storage.hash.ConcurrentLinkedHashMap;
import com.thimbleware.jmemcached.storage.mmap.MemoryMappedBlockStore;
/**
 * Entry point that boots a standalone jMemcached daemon listening on port
 * 12121 with the binary protocol enabled. The storage backend is selected by
 * the two local flags below; with both disabled (the default) an in-heap
 * {@link ConcurrentLinkedHashMap} with FIFO eviction is used.
 */
public class MemcachedManager {

    @SuppressWarnings("rawtypes")
    public static void main(String[] args) throws Exception {
        // Backend toggles: off-heap byte-buffer blocks, memory-mapped blocks,
        // or (both false) a plain in-heap map.
        boolean blockStore = false;
        boolean memoryMapped = false;

        final MemCacheDaemon<LocalCacheElement> daemon = new MemCacheDaemon<LocalCacheElement>();
        CacheStorage<Key, LocalCacheElement> storage;
        if (blockStore) {
            BlockStoreFactory blockStoreFactory = ByteBufferBlockStore.getFactory();
            // NOTE(review): the numeric arguments are sizing/eviction tuning
            // values for BlockStorageCacheStorage — confirm their meanings
            // against the jmemcached API before changing them.
            storage = new BlockStorageCacheStorage(8, 1024000, 8, 10240000, 1000, blockStoreFactory);
        } else if (memoryMapped) {
            BlockStoreFactory blockStoreFactory = MemoryMappedBlockStore.getFactory();
            storage = new BlockStorageCacheStorage(8, 1024000, 8, 1024000, 1000, blockStoreFactory);
        } else {
            storage = ConcurrentLinkedHashMap.create(ConcurrentLinkedHashMap.EvictionPolicy.FIFO, 1000, 10240000);
        }
        daemon.setCache(new CacheImpl(storage));
        daemon.setBinary(true);
        daemon.setAddr(new InetSocketAddress(12121));
        daemon.setIdleTime(0);
        daemon.setVerbose(false);
        daemon.start();

        // Stop the daemon cleanly when the JVM shuts down.
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            public void run() {
                if (daemon.isRunning())
                    daemon.stop();
            }
        }));
    }
}
| apache-2.0 |
LukaszPiskadlo/Pat2017_Lukasz_Piskadlo-Backend | src/main/java/com/lukaszpiskadlo/Repository/UserRepository.java | 211 | package com.lukaszpiskadlo.Repository;
import com.lukaszpiskadlo.Model.User;
import org.springframework.data.jpa.repository.JpaRepository;
/**
 * Spring Data JPA repository for {@code User} entities keyed by their
 * {@link Long} id. All CRUD and paging operations are inherited from
 * {@link JpaRepository}; no custom query methods are declared.
 */
public interface UserRepository extends JpaRepository<User, Long> {
}
| apache-2.0 |
xiehan/zoara-server | src/zoara/sfs2x/extension/db/ItemDBHandler.java | 14098 | package zoara.sfs2x.extension.db;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import zoara.sfs2x.extension.simulation.ActivePlayer;
import zoara.sfs2x.extension.simulation.World;
import zoara.sfs2x.extension.simulation.item.InventoryItem;
import zoara.sfs2x.extension.simulation.item.WorldItem;
import com.smartfoxserver.v2.db.IDBManager;
import com.smartfoxserver.v2.exceptions.SFSErrorCode;
import com.smartfoxserver.v2.exceptions.SFSErrorData;
import com.smartfoxserver.v2.extensions.SFSExtension;
/**
 * Static JDBC helpers that persist {@code WorldItem} and {@code InventoryItem}
 * state for the simulation.
 *
 * Every method borrows a pooled connection from the zone's {@link IDBManager},
 * executes its statement(s), and returns the connection to the pool via
 * try-with-resources — including on failure, which the original code did not
 * do (connections leaked on every {@link SQLException}). SQL failures are
 * logged through the extension and swallowed, so callers are never interrupted
 * by persistence errors.
 */
public class ItemDBHandler
{
    // Logs a SQL failure through the extension. The SFSErrorData construction
    // is kept from the original per-method handlers for parity, although only
    // the trace output is observable.
    private static void reportSqlError(SFSExtension extension, SQLException e)
    {
        SFSErrorData errData = new SFSErrorData(SFSErrorCode.GENERIC_ERROR);
        errData.addParameter("SQL Error: " + e.getMessage());
        extension.trace("A SQL Error occurred: " + e.getMessage());
    }

    /**
     * Inserts a new world item row (template, unique id, transform, flags).
     * The generated database id is NOT written back to the item here.
     */
    public static void addWorldItem(SFSExtension extension, WorldItem worldItem)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        // try-with-resources closes the statement and returns the pooled
        // connection even when an exception is thrown.
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "INSERT INTO world_objects " +
                 "(TemplateID, UniqueItemID, PositionX, PositionY, PositionZ, " +
                 "RotationX, RotationY, RotationZ, Grabbable, Skill) " +
                 "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"))
        {
            stmt.setInt(1, worldItem.getTemplateID());
            stmt.setString(2, worldItem.getUniqueID());
            stmt.setFloat(3, worldItem.transform.getPosition()[0]);
            stmt.setFloat(4, worldItem.transform.getPosition()[1]);
            stmt.setFloat(5, worldItem.transform.getPosition()[2]);
            stmt.setFloat(6, worldItem.transform.getRotation()[0]);
            stmt.setFloat(7, worldItem.transform.getRotation()[1]);
            stmt.setFloat(8, worldItem.transform.getRotation()[2]);
            stmt.setBoolean(9, worldItem.isGrabbable());
            stmt.setBoolean(10, worldItem.isSkillItem());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Marks a world item row as destroyed and stamps the destruction time
     * with the database server's current time (NOW()).
     */
    public static void destroyWorldItem(SFSExtension extension, WorldItem item)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "UPDATE world_objects SET Destroyed = ?, DestroyTime = NOW() " +
                 "WHERE uid = ?"))
        {
            stmt.setBoolean(1, item.isDestroyed());
            stmt.setInt(2, item.getDBID());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Persists a world item's current position and rotation.
     *
     * If either the position or rotation array has fewer than three
     * components, the update is skipped entirely (same guard behavior as the
     * original code), but the pooled connection is still released.
     */
    public static void updateWorldItem(SFSExtension extension, WorldItem item)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "UPDATE world_objects SET " +
                 "PositionX = ?, PositionY = ?, PositionZ = ?, " +
                 "RotationX = ?, RotationY = ?, RotationZ = ? " +
                 "WHERE uid = ?"))
        {
            try {
                float[] pos = item.transform.getPosition();
                stmt.setFloat(1, pos[0]);
                stmt.setFloat(2, pos[1]);
                stmt.setFloat(3, pos[2]);
            } catch (ArrayIndexOutOfBoundsException aioobe) {
                extension.trace("Something is wrong here. Skipping position.");
                return; // resources released by try-with-resources
            }

            try {
                float[] rot = item.transform.getRotation();
                stmt.setFloat(4, rot[0]);
                stmt.setFloat(5, rot[1]);
                stmt.setFloat(6, rot[2]);
            } catch (ArrayIndexOutOfBoundsException aioobe) {
                extension.trace("Something is wrong here. Skipping rotation.");
                return; // resources released by try-with-resources
            }

            stmt.setInt(7, item.getDBID());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Loads every world item row (live and destroyed) into the given world,
     * in ascending uid order.
     */
    public static void getWorldItems(SFSExtension extension, World world)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "SELECT * FROM world_objects ORDER BY uid ASC");
             ResultSet res = stmt.executeQuery())
        {
            while (res.next())
            {
                String uniqueID = res.getString("UniqueItemID");
                boolean isDestroyed = res.getBoolean("Destroyed");

                // Destroyed items are registered through a separate factory
                // method so the world can track them apart from live objects.
                WorldItem worldItem = isDestroyed
                        ? world.addDestroyedWorldItem(uniqueID)
                        : world.addWorldItem(uniqueID);

                worldItem.setDBID(res.getInt("uid"));
                worldItem.setTemplateID(res.getInt("TemplateID"));
                worldItem.setUniqueID(uniqueID);
                worldItem.transform.setPositionX(res.getFloat("PositionX"));
                worldItem.transform.setPositionY(res.getFloat("PositionY"));
                worldItem.transform.setPositionZ(res.getFloat("PositionZ"));
                worldItem.transform.setRotationX(res.getFloat("RotationX"));
                worldItem.transform.setRotationY(res.getFloat("RotationY"));
                worldItem.transform.setRotationZ(res.getFloat("RotationZ"));
                worldItem.setGrabbable(res.getBoolean("Grabbable"));
                worldItem.setSkillItem(res.getBoolean("Skill"));
                worldItem.setDestroyed(isDestroyed);
            }
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Inserts a new inventory row for the item's owning player.
     */
    public static void addInventoryItem(SFSExtension extension, InventoryItem item)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "INSERT INTO player_inventory " +
                 "(PlayerID, TemplateID, UniqueItemID, InventoryIndex, Quantity) " +
                 "VALUES (?, ?, ?, ?, ?)"))
        {
            stmt.setInt(1, item.getPlayer().getDBID());
            stmt.setInt(2, item.getTemplateID());
            stmt.setString(3, item.getUniqueID());
            stmt.setInt(4, item.getInventoryIndex());
            stmt.setInt(5, item.getQuantity());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Updates the slot index and quantity of a single inventory row.
     */
    public static void updateInventoryItem(SFSExtension extension, InventoryItem item)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "UPDATE player_inventory SET InventoryIndex = ?, Quantity = ? " +
                 "WHERE uid = ?"))
        {
            stmt.setInt(1, item.getInventoryIndex());
            stmt.setInt(2, item.getQuantity());
            stmt.setInt(3, item.getDBID());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Updates slot index and quantity for many inventory rows in one
     * transactional batch. Auto-commit is restored before the connection is
     * returned to the pool (the original left it disabled, poisoning the
     * pooled connection for later borrowers).
     */
    public static void updateInventoryItems(SFSExtension extension, List<InventoryItem> items)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             Statement stmt = connection.createStatement())
        {
            connection.setAutoCommit(false);
            try {
                // Values are ints produced by the items themselves, so the
                // string-built batch carries no injection risk.
                for (InventoryItem item : items)
                {
                    stmt.addBatch("UPDATE player_inventory " +
                            "SET InventoryIndex = " + item.getInventoryIndex() +
                            ", Quantity = " + item.getQuantity() + " " +
                            "WHERE uid = " + item.getDBID());
                }
                stmt.executeBatch();
                connection.commit();
            } finally {
                connection.setAutoCommit(true);
            }
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Deletes a single inventory row by its database id.
     */
    public static void removeInventoryItem(SFSExtension extension, InventoryItem item)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "DELETE FROM player_inventory WHERE uid = ?"))
        {
            stmt.setInt(1, item.getDBID());
            stmt.execute();
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Deletes many inventory rows in one transactional batch. Auto-commit is
     * restored before the connection goes back to the pool (see
     * {@link #updateInventoryItems}).
     */
    public static void removeInventoryItems(SFSExtension extension, List<InventoryItem> items)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             Statement stmt = connection.createStatement())
        {
            connection.setAutoCommit(false);
            try {
                for (InventoryItem item : items)
                {
                    stmt.addBatch("DELETE FROM player_inventory WHERE uid = " + item.getDBID());
                }
                stmt.executeBatch();
                connection.commit();
            } finally {
                connection.setAutoCommit(true);
            }
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }

    /**
     * Loads a player's inventory rows (ordered by slot index) into the
     * player's inventory. Items retrieved this way are flagged as not-new so
     * later persistence logic can tell them apart from freshly created items.
     */
    public static void getPlayerInventory(SFSExtension extension, ActivePlayer player)
    {
        IDBManager dbManager = extension.getParentZone().getDBManager();
        try (Connection connection = dbManager.getConnection();
             PreparedStatement stmt = connection.prepareStatement(
                 "SELECT * FROM player_inventory " +
                 "WHERE PlayerID = ? " +
                 "ORDER BY InventoryIndex ASC"))
        {
            stmt.setInt(1, player.getDBID());

            try (ResultSet res = stmt.executeQuery())
            {
                while (res.next())
                {
                    InventoryItem item = player.inventory.addItem(res.getInt("InventoryIndex"), true);
                    item.setDBID(res.getInt("uid"));
                    item.setTemplateID(res.getInt("TemplateID"));
                    item.setUniqueID(res.getString("UniqueItemID"));
                    item.setQuantity(res.getInt("Quantity"));
                    item.setNew(false); // retrieved from the DB; important later
                }
            }
        }
        catch (SQLException e)
        {
            reportSqlError(extension, e);
        }
    }
}
| apache-2.0 |
no-hope/java-toolkit | projects/serialization/src/main/java/org/nohope/serialization/streams/JsonProvider.java | 1218 | package org.nohope.serialization.streams;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.IOUtils;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
/**
* @author <a href="mailto:ketoth.xupack@gmail.com">ketoth xupack</a>
*/
/**
 * {@link SerializationProvider} backed by a Jackson {@link ObjectMapper},
 * writing and reading objects as JSON.
 *
 * @author <a href="mailto:ketoth.xupack@gmail.com">ketoth xupack</a>
 */
public final class JsonProvider implements SerializationProvider {
    private final ObjectMapper mapper;

    /**
     * @param mapper the mapper used for all (de)serialization; caller-configured
     */
    public JsonProvider(@Nonnull final ObjectMapper mapper) {
        this.mapper = mapper;
    }

    /** Creates a provider with a default, unconfigured {@link ObjectMapper}. */
    public JsonProvider() {
        this(new ObjectMapper());
    }

    @Override
    public void writeObject(@Nonnull final OutputStream stream, @Nonnull final Serializable object) throws IOException {
        mapper.writer().writeValue(stream, object);
    }

    @Override
    public <T extends Serializable> T readObject(@Nonnull final InputStream stream,
                                                 @Nonnull final Class<T> clazz) throws IOException {
        try {
            return mapper.readValue(stream, clazz);
        } catch (final IOException e) {
            // Use the explicit-charset overload: IOUtils.toString(InputStream)
            // is deprecated and decodes with the platform default charset.
            // NOTE(review): the mapper has already consumed part of the stream
            // here, so the message contains only the unread remainder.
            throw new IOException(IOUtils.toString(stream, StandardCharsets.UTF_8), e);
        }
    }
}
| apache-2.0 |
authlete/authlete-java-sample-server | src/main/java/com/authlete/sample/server/api/auth/Authorizer.java | 4401 | /*
* Copyright 2014 Authlete, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.authlete.sample.server.api.auth;
import javax.servlet.http.HttpSession;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.subject.Subject;
import com.authlete.common.dto.AuthorizationIssueRequest;
/**
 * Handles the end-user's authorization decision: authenticates the submitted
 * credentials, verifies that the authenticated account matches the subject
 * required by the client (if any), and then issues the requested artifacts
 * (authorization code, ID token and/or access token).
 */
class Authorizer extends BaseAuthorizationHandler
{
    private final HttpSession session;
    private final InteractionData model;
    private final String username;
    private final String password;


    Authorizer(HttpSession session, InteractionData model, String username, String password)
    {
        this.session = session;
        this.model = model;
        this.username = username;
        this.password = password;
    }


    public Response handle()
    {
        // Authenticate using the submitted credentials.
        Subject user = authenticate();

        // Make sure the authenticated account is the one the client requires.
        ensureRequiredSubject(user);

        // Issue an authorization code, an ID token and/or an access token.
        return issue(createIssueRequest(user));
    }


    /**
     * Logs the user in via Apache Shiro. On failure, the authorization page
     * is shown again with an error message.
     */
    private Subject authenticate()
    {
        // Pack the username and password into AuthenticationToken
        // which Apache Shiro's SecurityManager can accept.
        AuthenticationToken credentials = new UsernamePasswordToken(username, password);

        Subject user = SecurityUtils.getSubject();

        try
        {
            user.login(credentials);
            return user;
        }
        catch (AuthenticationException e)
        {
            // Login failed.
            error("Login failed: username=%s, error=%s (%s)",
                    username, e.getMessage(), e.getClass().getSimpleName());

            // TODO: Internationalize the error message.
            throw backToAuthorizationPage("The user's credentials are invalid.");
        }
    }


    /**
     * Checks that the authenticated principal equals the subject the client
     * application requested (when one was requested at all).
     */
    private void ensureRequiredSubject(Subject user)
    {
        // The subject that the client application requires; null means "any".
        String requiredSubject = model.getRes().getSubject();

        if (requiredSubject == null || requiredSubject.equals((String) user.getPrincipal()))
        {
            // Either no particular subject is required, or it matches. OK.
            return;
        }

        // TODO: Internationalize the error message.
        throw backToAuthorizationPage("Login by the account '" + requiredSubject + "'.");
    }


    /**
     * Builds an exception whose response re-renders the authorization page
     * with the given error message.
     */
    private WebApplicationException backToAuthorizationPage(String errorMessage)
    {
        // Record the error message and stash the model back in the session so
        // the page can pick it up on redisplay.
        model.setErrorMessage(errorMessage);
        session.setAttribute("model", model);

        // Create a response to show the authorization page again.
        Response response = new AuthorizationViewable(model).toResponse();

        return new WebApplicationException(response);
    }


    /** Creates the request for Authlete's /auth/authorization/issue API. */
    private AuthorizationIssueRequest createIssueRequest(Subject user)
    {
        return new AuthorizationIssueRequestCreator(model.getRes(), user).create();
    }
}
| apache-2.0 |
nherbaut/erel-maven-plugin | src/main/java/net/erel/maven/plugins/mojo/PGXAbstractMojo.java | 3677 | package net.erel.maven.plugins.mojo;
import net.erel.maven.plugins.service.bugtracking.BugTrackingServiceFactory;
import net.erel.maven.plugins.service.bugtracking.BugTrackingServiceFactoryDefault;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.components.interactivity.Prompter;
/**
 * Base class for PGX Maven mojos that interact with a bug tracker (Redmine
 * and/or JIRA) and a Nexus repository. It declares the shared configuration
 * parameters and exposes them through the {@link BugTrackerAwareMojo}
 * accessors, plus a factory for the bug-tracking service.
 */
public abstract class PGXAbstractMojo extends AbstractMojo implements
		BugTrackerAwareMojo {

	/** {@inheritDoc} */
	@Override
	public Prompter getPrompter() {
		return prompter;
	}

	/** {@inheritDoc} */
	@Override
	public String getRedmineAPIKey() {
		return redmineAPIKey;
	}

	/** {@inheritDoc} */
	@Override
	public String getRedmineHost() {
		return redmineHost;
	}

	/** {@inheritDoc} */
	@Override
	public String getNexusHost() {
		return nexusHost;
	}

	/** {@inheritDoc} */
	@Override
	public String getNexusUser() {
		return nexusUser;
	}

	/** {@inheritDoc} */
	@Override
	public String getNexusPassword() {
		return nexusPassword;
	}

	/** {@inheritDoc} */
	@Override
	public String getJiraHost() {
		return jiraHost;
	}

	/** {@inheritDoc} */
	@Override
	public String getJiraUser() {
		return jiraUser;
	}

	/** {@inheritDoc} */
	@Override
	public String getJiraPassword() {
		return jiraPassword;
	}

	/** Plexus prompter used for interactive console input. */
	@Component
	protected Prompter prompter;

	/**
	 * Redmine API key. Declare it as a property in your settings.xml for
	 * your convenience.
	 */
	@Parameter(property = "redmineAPIKey", required = true)
	protected String redmineAPIKey;

	/**
	 * URL of the Redmine host, to put as a property of the settings.xml for
	 * your convenience.
	 */
	@Parameter(property = "redmineHost", required = false, defaultValue = "")
	protected String redmineHost;

	/**
	 * URL of the Nexus repository, to put as a property of the settings.xml
	 * for your convenience.
	 */
	@Parameter(property = "nexusHost", required = false, defaultValue = "http://repo.erel.net")
	protected String nexusHost;

	/**
	 * User to use for Nexus authentication, to put as a property of the
	 * settings.xml for your convenience.
	 */
	@Parameter(property = "nexusUser", required = true)
	protected String nexusUser;

	/**
	 * Password to use for Nexus authentication, to put as a property of the
	 * settings.xml for your convenience.
	 */
	@Parameter(property = "nexusPassword", required = true)
	protected String nexusPassword;

	/** JIRA host URL; optional (empty disables JIRA). */
	@Parameter(property = "jiraHost", required = false, defaultValue = "")
	protected String jiraHost;

	/** JIRA user name; optional. */
	@Parameter(property = "jiraUser", required = false, defaultValue = "")
	protected String jiraUser;

	/** JIRA password; optional. */
	@Parameter(property = "jiraPassword", required = false, defaultValue = "")
	protected String jiraPassword;

	public PGXAbstractMojo() {
		super();
	}

	/**
	 * Builds a bug-tracking service factory configured with the JIRA and
	 * Redmine parameters injected into this mojo.
	 */
	public BugTrackingServiceFactory getBugTrackingFactory() {
		return new BugTrackingServiceFactoryDefault(jiraHost, jiraUser,
				jiraPassword, redmineAPIKey, redmineHost);
	}
} | apache-2.0 |
btmura/rbb | src/com/btmura/android/reddit/app/Controller.java | 1071 | /*
* Copyright (C) 2013 Brian Muramatsu
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.btmura.android.reddit.app;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.content.Loader;
import android.widget.ListAdapter;
/**
 * Binds a cursor-producing {@link Loader} to a {@link ListAdapter} and carries
 * the associated state across configuration changes.
 *
 * @param <A> concrete adapter type populated from the loaded cursor
 */
interface Controller<A extends ListAdapter> {

    // Lifecycle methods

    /** Restores state previously written by {@link #saveInstanceState}. */
    void restoreInstanceState(Bundle savedInstanceState);

    /** Writes this controller's state into {@code outState}. */
    void saveInstanceState(Bundle outState);

    // Loader-related methods

    /** Creates the loader that supplies this controller's cursor data. */
    Loader<Cursor> createLoader();

    /**
     * Supplies a newly loaded cursor (or null) to this controller; what
     * happens to any previous cursor is implementation-specific.
     */
    void swapCursor(Cursor cursor);

    /** @return the adapter managed by this controller. */
    A getAdapter();
}
| apache-2.0 |
google/graphicsfuzz | reducer/src/test/java/com/graphicsfuzz/reducer/reductionopportunities/ExprToConstantReductionOpportunitiesTest.java | 8404 | /*
* Copyright 2018 The GraphicsFuzz Project Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.graphicsfuzz.reducer.reductionopportunities;
import static org.junit.Assert.assertEquals;
import com.graphicsfuzz.common.ast.TranslationUnit;
import com.graphicsfuzz.common.glslversion.ShadingLanguageVersion;
import com.graphicsfuzz.common.tool.PrettyPrinterVisitor;
import com.graphicsfuzz.common.util.CompareAsts;
import com.graphicsfuzz.common.util.IdGenerator;
import com.graphicsfuzz.common.util.ParseHelper;
import com.graphicsfuzz.common.util.RandomWrapper;
import java.util.List;
import org.junit.Test;
public class ExprToConstantReductionOpportunitiesTest {

  /**
   * Finds all expr-to-constant reduction opportunities in {@code tu}, using a
   * fixed random seed and a fresh id generator so results are deterministic.
   */
  private static List<SimplifyExprReductionOpportunity> findOps(
      TranslationUnit tu,
      boolean reduceEverywhere,
      ShadingLanguageVersion shadingLanguageVersion) {
    return ExprToConstantReductionOpportunities
        .findOpportunities(MakeShaderJobFromFragmentShader.make(tu),
            new ReducerContext(reduceEverywhere, true, shadingLanguageVersion,
                new RandomWrapper(0), new IdGenerator()));
  }

  /**
   * Parses {@code program}, applies every opportunity found (with
   * reduceEverywhere enabled), and asserts the resulting AST matches
   * {@code expected}. Factored out of the tests below, which previously
   * duplicated this parse/apply/compare sequence.
   */
  private static void applyAllAndCheck(String program, String expected,
      ShadingLanguageVersion shadingLanguageVersion) throws Exception {
    final TranslationUnit tu = ParseHelper.parse(program);
    for (SimplifyExprReductionOpportunity op
        : findOps(tu, true, shadingLanguageVersion)) {
      op.applyReduction();
    }
    CompareAsts.assertEqualAsts(expected, tu);
  }

  @Test
  public void testOut() throws Exception {
    // An argument bound to an 'out' parameter must not be replaced by a
    // constant, so the program should be unchanged.
    final String prog = "void f(out int x) { } void main() { int a; f(a); }";
    applyAllAndCheck(prog, prog, ShadingLanguageVersion.ESSL_100);
  }

  @Test
  public void testInOut() throws Exception {
    // An argument bound to an 'inout' parameter must not be replaced either.
    final String prog = "void f(inout int x) { } void main() { int a; f(a); }";
    applyAllAndCheck(prog, prog, ShadingLanguageVersion.ESSL_100);
  }

  @Test
  public void testIn() throws Exception {
    // An argument bound to a plain 'in' parameter may be replaced by the
    // canonical constant 1.
    final String prog = "void f(in int x) { } void main() { int a; f(a); }";
    final String expectedProg = "void f(in int x) { } void main() { int a; f(1); }";
    applyAllAndCheck(prog, expectedProg, ShadingLanguageVersion.ESSL_100);
  }

  @Test
  public void testSingleLiveVariable() throws Exception {
    final String program = "void main() { int GLF_live3_a; GLF_live3_a; }";
    final TranslationUnit tu = ParseHelper.parse(program);
    // reduceEverywhere is false here: only the GLF_live-prefixed expression
    // should be reducible.
    final List<SimplifyExprReductionOpportunity> ops =
        findOps(tu, false, ShadingLanguageVersion.ESSL_100);
    assertEquals(1, ops.size());
    ops.get(0).applyReduction();
    CompareAsts.assertEqualAsts("void main() { int GLF_live3_a; 1; }", tu);
  }

  @Test
  public void testSwitch() throws Exception {
    // Switch case labels must not be touched.
    final String program = "#version 310 es\n"
        + "void main() {\n"
        + "  switch(1) {\n"
        + "    case - 1:\n"
        + "      1;\n"
        + "  }\n"
        + "}\n";
    assertEquals(0,
        findOps(ParseHelper.parse(program), true, ShadingLanguageVersion.ESSL_310).size());
  }

  @Test
  public void testSwitchMultipleCases() throws Exception {
    // This test checks that switch cases are not simplified to canonical
    // literals, because in the following we do not want multiple identical
    // cases:
    final String program = "#version 310 es\n"
        + "void main() {\n"
        + "  switch(1) {\n"
        + "    case 1:\n"
        + "    case 2:\n"
        + "    case 3:\n"
        + "      1;\n"
        + "  }\n"
        + "}\n";
    assertEquals(0,
        findOps(ParseHelper.parse(program), true, ShadingLanguageVersion.ESSL_310).size());
  }

  @Test
  public void testSimplifyLiterals() throws Exception {
    // Every literal and constructor argument should collapse to the canonical
    // value for its type (1, 1.0, 1u, true), and square matrix constructors
    // should be rewritten to their shorthand names (mat2x2 -> mat2, etc.).
    final String program = "#version 310 es\n"
        + "struct S {\n"
        + "  int a;\n"
        + "  float b;\n"
        + "};\n"
        + "void main() {\n"
        + " 100;\n"
        + " 200.0;\n"
        + " 300u;\n"
        + " false;\n"
        + " vec2(10.0, 1.0);\n"
        + " vec3(11.0);\n"
        + " vec4(2.0, 3.0, 4.0);\n"
        + " ivec2(1, 2);\n"
        + " ivec3(1, 4, 6);\n"
        + " ivec4(1, 8, 9);\n"
        + " uvec2(6u);\n"
        + " uvec3(7u, 8u);\n"
        + " uvec4(9u, 10u, 11u, 12u);\n"
        + " bvec2(false, true);\n"
        + " bvec3(true, false, true);\n"
        + " bvec4(false, true, false, true);\n"
        + " mat2x2(10.0, 20.0, 30.0, 40.0);\n"
        + " mat2x3(10.0, 20.0, 30.0, 40.0, 50.0, 60.0);\n"
        + " mat2x4(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0);\n"
        + " mat3x2(10.0, 20.0, 30.0, 40.0, 50.0, 60.0);\n"
        + " mat3x3(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0);\n"
        + " mat3x4(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0);\n"
        + " mat4x2(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0);\n"
        + " mat4x3(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0);\n"
        + " mat4x4(10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0, 110.0, 120.0, "
        + "130.0, 140.0, 150.0, 160.0);\n"
        + " S(6, 2.0);\n"
        + "}";
    final String expected = "#version 310 es\n"
        + "struct S {\n"
        + "  int a;\n"
        + "  float b;\n"
        + "};\n"
        + "void main() {\n"
        + " 1;\n"
        + " 1.0;\n"
        + " 1u;\n"
        + " true;\n"
        + " vec2(1.0);\n"
        + " vec3(1.0);\n"
        + " vec4(1.0);\n"
        + " ivec2(1);\n"
        + " ivec3(1);\n"
        + " ivec4(1);\n"
        + " uvec2(1u);\n"
        + " uvec3(1u);\n"
        + " uvec4(1u);\n"
        + " bvec2(true);\n"
        + " bvec3(true);\n"
        + " bvec4(true);\n"
        + " mat2(1.0);\n"
        + " mat2x3(1.0);\n"
        + " mat2x4(1.0);\n"
        + " mat3x2(1.0);\n"
        + " mat3(1.0);\n"
        + " mat3x4(1.0);\n"
        + " mat4x2(1.0);\n"
        + " mat4x3(1.0);\n"
        + " mat4(1.0);\n"
        + " S(1, 1.0);\n"
        + "}";
    applyAllAndCheck(program, expected, ShadingLanguageVersion.ESSL_100);
  }
}
| apache-2.0 |
OpenFeign/feign | core/src/main/java/feign/Request.java | 13410 | /*
* Copyright 2012-2022 The Feign Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package feign;
import java.io.Serializable;
import java.net.HttpURLConnection;
import java.nio.charset.Charset;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static feign.Util.checkNotNull;
import static feign.Util.valuesOrEmpty;
/**
* An immutable request to an http server.
*/
/**
 * An immutable request to an http server.
 */
public final class Request implements Serializable {

  /** HTTP verbs supported by feign requests. */
  public enum HttpMethod {
    GET, HEAD, POST, PUT, DELETE, CONNECT, OPTIONS, TRACE, PATCH
  }

  /**
   * HTTP protocol versions with their wire-format names ("HTTP/1.1" etc.).
   * MOCK has no wire name and reports its enum constant name instead.
   */
  public enum ProtocolVersion {

    HTTP_1_0("HTTP/1.0"), HTTP_1_1("HTTP/1.1"), HTTP_2("HTTP/2.0"), MOCK;

    final String protocolVersion;

    // Used only by MOCK: falls back to the constant name.
    ProtocolVersion() {
      protocolVersion = name();
    }

    ProtocolVersion(String protocolVersion) {
      this.protocolVersion = protocolVersion;
    }

    @Override
    public String toString() {
      return protocolVersion;
    }
  }

  /**
   * No parameters can be null except {@code body} and {@code charset}. All parameters must be
   * effectively immutable, via safe copies, not mutating or otherwise.
   *
   * <p>NOTE(review): {@code method.toUpperCase()} below is locale-sensitive; in a Turkish
   * default locale a lowercase verb would not map to the expected enum constant — confirm
   * callers always pass upper-case method names.
   *
   * @deprecated {@link #create(HttpMethod, String, Map, byte[], Charset)}
   */
  @Deprecated
  public static Request create(String method,
                               String url,
                               Map<String, Collection<String>> headers,
                               byte[] body,
                               Charset charset) {
    checkNotNull(method, "httpMethod of %s", method);
    final HttpMethod httpMethod = HttpMethod.valueOf(method.toUpperCase());
    return create(httpMethod, url, headers, body, charset, null);
  }

  /**
   * Builds a Request. All parameters must be effectively immutable, via safe copies.
   *
   * @param httpMethod for the request.
   * @param url for the request.
   * @param headers to include.
   * @param body of the request, can be {@literal null}
   * @param charset of the request, can be {@literal null}
   * @return a Request
   */
  @Deprecated
  public static Request create(HttpMethod httpMethod,
                               String url,
                               Map<String, Collection<String>> headers,
                               byte[] body,
                               Charset charset) {
    return create(httpMethod, url, headers, Body.create(body, charset), null);
  }

  /**
   * Builds a Request. All parameters must be effectively immutable, via safe copies.
   *
   * @param httpMethod for the request.
   * @param url for the request.
   * @param headers to include.
   * @param body of the request, can be {@literal null}
   * @param charset of the request, can be {@literal null}
   * @param requestTemplate the template this request was built from, can be {@literal null}
   * @return a Request
   */
  public static Request create(HttpMethod httpMethod,
                               String url,
                               Map<String, Collection<String>> headers,
                               byte[] body,
                               Charset charset,
                               RequestTemplate requestTemplate) {
    return create(httpMethod, url, headers, Body.create(body, charset), requestTemplate);
  }

  /**
   * Builds a Request. All parameters must be effectively immutable, via safe copies.
   *
   * @param httpMethod for the request.
   * @param url for the request.
   * @param headers to include.
   * @param body of the request, can be {@literal null}
   * @param requestTemplate the template this request was built from, can be {@literal null}
   * @return a Request
   */
  public static Request create(HttpMethod httpMethod,
                               String url,
                               Map<String, Collection<String>> headers,
                               Body body,
                               RequestTemplate requestTemplate) {
    return new Request(httpMethod, url, headers, body, requestTemplate);
  }

  private final HttpMethod httpMethod;
  private final String url;
  private final Map<String, Collection<String>> headers;
  private final Body body;
  private final RequestTemplate requestTemplate;
  private final ProtocolVersion protocolVersion;

  /**
   * Creates a new Request.
   *
   * <p>NOTE(review): unlike method/url/headers, {@code body} is not null-checked here;
   * {@link #charset()}, {@link #body()} and {@link #length()} would NPE on a null body while
   * {@link #toString()} guards against it — the static factories always supply a Body, so
   * confirm no caller passes null directly.
   *
   * @param method of the request.
   * @param url for the request.
   * @param headers for the request.
   * @param body for the request, optional.
   * @param requestTemplate used to build the request.
   */
  Request(HttpMethod method,
          String url,
          Map<String, Collection<String>> headers,
          Body body,
          RequestTemplate requestTemplate) {
    this.httpMethod = checkNotNull(method, "httpMethod of %s", method.name());
    this.url = checkNotNull(url, "url");
    this.headers = checkNotNull(headers, "headers of %s %s", method, url);
    this.body = body;
    this.requestTemplate = requestTemplate;
    // Every Request constructed here reports HTTP/1.1, regardless of what the
    // transport eventually negotiates.
    protocolVersion = ProtocolVersion.HTTP_1_1;
  }

  /**
   * Http Method for this request.
   *
   * @return the HttpMethod string
   * @deprecated @see {@link #httpMethod()}
   */
  @Deprecated
  public String method() {
    return httpMethod.name();
  }

  /**
   * Http Method for the request.
   *
   * @return the HttpMethod.
   */
  public HttpMethod httpMethod() {
    return this.httpMethod;
  }

  /**
   * URL for the request.
   *
   * @return URL as a String.
   */
  public String url() {
    return url;
  }

  /**
   * Request Headers.
   *
   * @return the request headers as an unmodifiable view.
   */
  public Map<String, Collection<String>> headers() {
    return Collections.unmodifiableMap(headers);
  }

  /**
   * Charset of the request.
   *
   * @return the current character set for the request, may be {@literal null} for binary data.
   */
  public Charset charset() {
    return body.encoding;
  }

  /**
   * If present, this is the replayable body to send to the server. In some cases, this may be
   * interpretable as text.
   *
   * @see #charset()
   */
  public byte[] body() {
    return body.data;
  }

  /** @return true when the body has no charset or no data (see {@link Body#isBinary()}). */
  public boolean isBinary() {
    return body.isBinary();
  }

  /**
   * Request Length.
   *
   * @return size of the request body.
   */
  public int length() {
    return this.body.length();
  }

  /**
   * Request HTTP protocol version
   *
   * @return HTTP protocol version
   */
  public ProtocolVersion protocolVersion() {
    return protocolVersion;
  }

  /**
   * Request as an HTTP/1.1 request.
   *
   * @return the request rendered as a request line, header lines and body.
   */
  @Override
  public String toString() {
    final StringBuilder builder = new StringBuilder();
    builder.append(httpMethod).append(' ').append(url).append(" HTTP/1.1\n");
    for (final String field : headers.keySet()) {
      for (final String value : valuesOrEmpty(headers, field)) {
        builder.append(field).append(": ").append(value).append('\n');
      }
    }
    if (body != null) {
      builder.append('\n').append(body.asString());
    }
    return builder.toString();
  }

  /**
   * Controls the per-request settings currently required to be implemented by all {@link Client
   * clients}
   */
  public static class Options {

    private final long connectTimeout;
    private final TimeUnit connectTimeoutUnit;
    private final long readTimeout;
    private final TimeUnit readTimeoutUnit;
    private final boolean followRedirects;

    /**
     * Creates a new Options instance.
     *
     * @param connectTimeoutMillis connection timeout in milliseconds.
     * @param readTimeoutMillis read timeout in milliseconds.
     * @param followRedirects if the request should follow 3xx redirections.
     *
     * @deprecated please use {@link #Options(long, TimeUnit, long, TimeUnit, boolean)}
     */
    @Deprecated
    public Options(int connectTimeoutMillis, int readTimeoutMillis, boolean followRedirects) {
      this(connectTimeoutMillis, TimeUnit.MILLISECONDS,
          readTimeoutMillis, TimeUnit.MILLISECONDS,
          followRedirects);
    }

    /**
     * Creates a new Options Instance.
     *
     * @param connectTimeout value.
     * @param connectTimeoutUnit with the TimeUnit for the timeout value.
     * @param readTimeout value.
     * @param readTimeoutUnit with the TimeUnit for the timeout value.
     * @param followRedirects if the request should follow 3xx redirections.
     */
    public Options(long connectTimeout, TimeUnit connectTimeoutUnit,
                   long readTimeout, TimeUnit readTimeoutUnit,
                   boolean followRedirects) {
      super();
      this.connectTimeout = connectTimeout;
      this.connectTimeoutUnit = connectTimeoutUnit;
      this.readTimeout = readTimeout;
      this.readTimeoutUnit = readTimeoutUnit;
      this.followRedirects = followRedirects;
    }

    /**
     * Creates a new Options instance that follows redirects by default.
     *
     * @param connectTimeoutMillis connection timeout in milliseconds.
     * @param readTimeoutMillis read timeout in milliseconds.
     *
     * @deprecated please use {@link #Options(long, TimeUnit, long, TimeUnit, boolean)}
     */
    @Deprecated
    public Options(int connectTimeoutMillis, int readTimeoutMillis) {
      this(connectTimeoutMillis, readTimeoutMillis, true);
    }

    /**
     * Creates the new Options instance using the following defaults:
     * <ul>
     * <li>Connect Timeout: 10 seconds</li>
     * <li>Read Timeout: 60 seconds</li>
     * <li>Follow all 3xx redirects</li>
     * </ul>
     */
    public Options() {
      this(10, TimeUnit.SECONDS, 60, TimeUnit.SECONDS, true);
    }

    /**
     * Defaults to 10 seconds. {@code 0} implies no timeout.
     *
     * @see java.net.HttpURLConnection#getConnectTimeout()
     */
    public int connectTimeoutMillis() {
      return (int) connectTimeoutUnit.toMillis(connectTimeout);
    }

    /**
     * Defaults to 60 seconds. {@code 0} implies no timeout.
     *
     * @see java.net.HttpURLConnection#getReadTimeout()
     */
    public int readTimeoutMillis() {
      return (int) readTimeoutUnit.toMillis(readTimeout);
    }


    /**
     * Defaults to true. {@code false} tells the client to not follow the redirections.
     *
     * @see HttpURLConnection#getFollowRedirects()
     */
    public boolean isFollowRedirects() {
      return followRedirects;
    }

    /**
     * Connect Timeout Value.
     *
     * @return current timeout value.
     */
    public long connectTimeout() {
      return connectTimeout;
    }

    /**
     * TimeUnit for the Connection Timeout value.
     *
     * @return TimeUnit
     */
    public TimeUnit connectTimeoutUnit() {
      return connectTimeoutUnit;
    }

    /**
     * Read Timeout value.
     *
     * @return current read timeout value.
     */
    public long readTimeout() {
      return readTimeout;
    }

    /**
     * TimeUnit for the Read Timeout value.
     *
     * @return TimeUnit
     */
    public TimeUnit readTimeoutUnit() {
      return readTimeoutUnit;
    }
  }

  /** @return the template this request was built from; may be {@literal null}. */
  @Experimental
  public RequestTemplate requestTemplate() {
    return this.requestTemplate;
  }

  /**
   * Request Body
   * <p>
   * Considered experimental, will most likely be made internal going forward.
   * </p>
   */
  @Experimental
  public static class Body implements Serializable {

    // transient: the charset is not Java-serialized, so a deserialized Body
    // reads as binary (see isBinary()).
    private transient Charset encoding;
    private byte[] data;

    private Body() {
      super();
    }

    private Body(byte[] data) {
      this.data = data;
    }

    private Body(byte[] data, Charset encoding) {
      this.data = data;
      this.encoding = encoding;
    }

    public Optional<Charset> getEncoding() {
      return Optional.ofNullable(this.encoding);
    }

    public int length() {
      /* calculate the content length based on the data provided */
      return data != null ? data.length : 0;
    }

    public byte[] asBytes() {
      return data;
    }

    /** Decodes the data with the body's charset; binary bodies render as "Binary data". */
    public String asString() {
      return !isBinary()
          ? new String(data, encoding)
          : "Binary data";
    }

    /** A body is considered binary when it has no charset or no data at all. */
    public boolean isBinary() {
      return encoding == null || data == null;
    }

    public static Body create(String data) {
      return new Body(data.getBytes());
    }

    public static Body create(String data, Charset charset) {
      return new Body(data.getBytes(charset), charset);
    }

    public static Body create(byte[] data) {
      return new Body(data);
    }

    public static Body create(byte[] data, Charset charset) {
      return new Body(data, charset);
    }

    /**
     * Creates a new Request Body with charset encoded data.
     *
     * @param data to be encoded.
     * @param charset to encode the data with. if {@literal null}, then data will be considered
     *        binary and will not be encoded.
     *
     * @return a new Request.Body instance with the encoded data.
     * @deprecated please use {@link Request.Body#create(byte[], Charset)}
     */
    @Deprecated
    public static Body encoded(byte[] data, Charset charset) {
      return create(data, charset);
    }

    /** @return a Body with neither data nor charset (length 0, binary). */
    public static Body empty() {
      return new Body();
    }
  }
}
| apache-2.0 |
dianping/zebra | zebra-admin-web/src/main/java/com/dianping/zebra/administrator/config/ZookeeperService.java | 3454 | package com.dianping.zebra.administrator.config;
import com.dianping.zebra.util.StringUtils;
import org.apache.zookeeper.*;
import org.apache.zookeeper.data.Stat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
* Created by taochen on 2018/10/28.
*/
/**
 * Static helper for storing configuration values in ZooKeeper. Keys use dot
 * notation ("a.b.c") and are mapped to znode paths ("/a/b/c"). Every operation
 * opens a fresh connection to {@code host} and closes it in a finally block.
 *
 * NOTE(review): the shared static {@code zooKeeper} field makes concurrent use
 * of these methods unsafe — confirm callers are effectively single-threaded.
 */
public class ZookeeperService {
    private static final Logger LOGGER = LoggerFactory.getLogger(ZookeeperService.class);

    private static ZooKeeper zooKeeper;

    private static final ZooKeeperConnection zooKeeperConn = new ZooKeeperConnection();

    /**
     * Creates the znode for {@code key}, creating any missing ancestor nodes
     * first. Returns false if the node already exists or any error occurs.
     */
    public static boolean createKey(String host, String key) {
        String path = parsePath(key);
        byte[] emptyData = "".getBytes();
        try {
            zooKeeper = zooKeeperConn.connect(host);
            // Create each missing ancestor ("/a", "/a/b", ...) before the leaf.
            int slash = path.indexOf('/', 1);
            while (slash != -1) {
                String ancestor = path.substring(0, slash);
                if (configExists(zooKeeper, ancestor) == null) {
                    zooKeeper.create(ancestor, emptyData, ZooDefs.Ids.OPEN_ACL_UNSAFE,
                            CreateMode.PERSISTENT);
                }
                slash = path.indexOf('/', slash + 1);
            }
            zooKeeper.create(path, emptyData, ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
            return true;
        } catch (Exception e) {
            LOGGER.error("create key [" + key + "] fail, host:" + host, e);
            return false;
        } finally {
            zooKeeperConn.close();
        }
    }

    /** @return the node's Stat, or null if the node does not exist. */
    private static Stat configExists(ZooKeeper zooKeeperExecute, String path)
            throws KeeperException, InterruptedException {
        return zooKeeperExecute.exists(path, true);
    }

    /** @return the node's data, or null if the node is missing or an error occurs. */
    public static byte[] getConfig(String host, String key) {
        String path = parsePath(key);
        try {
            zooKeeper = zooKeeperConn.connect(host);
            Stat stat = configExists(zooKeeper, path);
            if (stat == null) {
                return null;
            }
            return zooKeeper.getData(path, false, null);
        } catch (Exception e) {
            LOGGER.error("get value fail, key [" + key + "],host:" + host, e);
        } finally {
            zooKeeperConn.close();
        }
        return null;
    }

    /** Overwrites the node's data; logs and returns silently if the node is absent. */
    public static void setConfig(String host, String key, byte[] data) {
        String path = parsePath(key);
        try {
            zooKeeper = zooKeeperConn.connect(host);
            Stat stat = configExists(zooKeeper, path);
            if (stat == null) {
                // The original dereferenced a null Stat here, producing a
                // spurious NullPointerException in the logs.
                LOGGER.error("set key [" + key + "] fail, node does not exist!");
                return;
            }
            zooKeeper.setData(path, data, stat.getVersion());
        } catch (Exception e) {
            LOGGER.error("set key [" + key + "] fail!", e);
        } finally {
            zooKeeperConn.close();
        }
    }

    /** Deletes the node; returns false if it is absent or an error occurs. */
    public static boolean deleteConfig(String host, String key) {
        String path = parsePath(key);
        try {
            zooKeeper = zooKeeperConn.connect(host);
            Stat stat = configExists(zooKeeper, path);
            if (stat == null) {
                // Same null-Stat dereference fixed as in setConfig.
                LOGGER.error("delete key [" + key + "] fail, node does not exist!");
                return false;
            }
            zooKeeper.delete(path, stat.getVersion());
            return true;
        } catch (Exception e) {
            LOGGER.error("delete key [" + key + "] fail!", e);
            return false;
        } finally {
            zooKeeperConn.close();
        }
    }

    /** Maps a dotted key ("a.b.c") to a znode path ("/a/b/c"). */
    private static String parsePath(String key) {
        return "/" + key.replace('.', '/');
    }
}
| apache-2.0 |
pzjack/webframe | src/main/java/com/dinglicom/dispatch/domain/DispathingTaskResp.java | 1015 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.dinglicom.dispatch.domain;
import com.dinglicom.frame.sys.domain.BaseMsgBean;
import java.util.List;
/**
* 送奶工任务查询结果
* @author panzhen
*/
/**
 * Query result for milk-deliverer (milkman) dispatch tasks: the total page
 * count plus the items on the current page.
 *
 * @author panzhen
 */
public class DispathingTaskResp extends BaseMsgBean {
    // Total number of result pages available for the query.
    private long total_page;
    // Task items on the current page.
    private List<DispathingTaskItem> data;

    /**
     * @return the total_page
     */
    public long getTotal_page() {
        return total_page;
    }

    /**
     * @param total_page the total_page to set
     */
    public void setTotal_page(long total_page) {
        this.total_page = total_page;
    }

    /**
     * @return the data
     */
    public List<DispathingTaskItem> getData() {
        return data;
    }

    /**
     * @param data the data to set
     */
    public void setData(List<DispathingTaskItem> data) {
        this.data = data;
    }
}
| apache-2.0 |
steschw/bpmn-simulator | animation/src/main/java/com/googlecode/bpmn_simulator/animation/ref/CastReference.java | 1599 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.googlecode.bpmn_simulator.animation.ref;
/**
 * A {@link Reference} adapter that narrows a reference of type {@code E} to a
 * subtype {@code T}: the referenced object is returned only when it is an
 * instance of the requested class, otherwise {@code null}.
 *
 * @param <E> the wide type of the wrapped reference
 * @param <T> the narrow type exposed by this reference
 */
public class CastReference<E, T extends E>
        implements Reference<T> {

    private final Reference<E> reference;
    private final Class<T> clazz;

    public CastReference(final Reference<E> reference, final Class<T> clazz) {
        super();
        this.reference = reference;
        this.clazz = clazz;
    }

    @Override
    public boolean hasReference() {
        // A null wrapped reference counts as "no reference".
        if (reference == null) {
            return false;
        }
        return reference.hasReference();
    }

    private boolean hasType(final E referenced) {
        return clazz.isAssignableFrom(referenced.getClass());
    }

    @Override
    public T getReferenced() {
        // Guard clauses: no reference, no target, or wrong type all yield null.
        if (!hasReference()) {
            return null;
        }
        final E target = reference.getReferenced();
        if ((target == null) || !hasType(target)) {
            return null;
        }
        return clazz.cast(target);
    }
}
| apache-2.0 |
davidmoten/state-machine | state-machine-example-shopping-definition/src/main/java/com/github/davidmoten/fsm/example/shop/catalog/Create.java | 280 | package com.github.davidmoten.fsm.example.shop.catalog;
import com.github.davidmoten.bean.annotation.GenerateImmutable;
import com.github.davidmoten.fsm.runtime.Event;
/**
 * Event requesting the creation of a {@link Catalog} entry.
 * An immutable variant is generated from this template via {@code @GenerateImmutable}.
 */
@GenerateImmutable
public class Create implements Event<Catalog> {
    // Identifier of the catalog the entry belongs to.
    String catalogId;
    // Human-readable name of the catalog entry.
    String name;
}
| apache-2.0 |
adjack/ExampleTools | app/src/main/java/activity/example/yuan/cn/exampletools/utils/ContentUtil.java | 5378 | package activity.example.yuan.cn.exampletools.utils;
import android.annotation.SuppressLint;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
/**
* 内容通用操作封装
*
* @author KenChung
*/
public class ContentUtil {
@SuppressLint("NewApi")
public static String getPath(final Context context, final Uri uri) {
final boolean isKitKat = Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT;
// DocumentProvider
if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
// ExternalStorageProvider
if (isExternalStorageDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
if ("primary".equalsIgnoreCase(type)) {
return String.format("%s/%s", Environment.getExternalStorageDirectory().getPath(), split[1]);
}
// TODO handle non-primary volumes
}
// DownloadsProvider
else if (isDownloadsDocument(uri)) {
final String id = DocumentsContract.getDocumentId(uri);
final Uri contentUri = ContentUris.withAppendedId(
Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
return getDataColumn(context, contentUri, null, null);
}
// MediaProvider
else if (isMediaDocument(uri)) {
final String docId = DocumentsContract.getDocumentId(uri);
final String[] split = docId.split(":");
final String type = split[0];
Uri contentUri = null;
if ("image".equals(type)) {
contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
} else if ("video".equals(type)) {
contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
} else if ("audio".equals(type)) {
contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
}
final String selection = "_id=?";
final String[] selectionArgs = new String[]{split[1]};
return getDataColumn(context, contentUri, selection, selectionArgs);
}
}
// MediaStore (and general)
else if ("content".equalsIgnoreCase(uri.getScheme())) {
// Return the remote address
if (isGooglePhotosUri(uri)) {
return uri.getLastPathSegment();
}
return getDataColumn(context, uri, null, null);
}
// File
else if ("file".equalsIgnoreCase(uri.getScheme())) {
return uri.getPath();
}
return null;
}
/**
* Get the value of the data column for this Uri. This is useful for
* MediaStore Uris, and other file-based ContentProviders.
*
* @param context The context.
* @param uri The Uri to query.
* @param selection (Optional) Filter used in the query.
* @param selectionArgs (Optional) Selection arguments used in the query.
* @return The value of the _data column, which is typically a file path.
*/
public static String getDataColumn(Context context, Uri uri, String selection, String[] selectionArgs) {
Cursor cursor = null;
try {
cursor = context.getContentResolver().query(uri, new String[]{MediaStore.Images.Media.DATA}
, selection, selectionArgs, null);
if (cursor != null && cursor.moveToFirst()) {
final int index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA);
return cursor.getString(index);
}
} finally {
if (cursor != null) {
cursor.close();
}
}
return null;
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is ExternalStorageProvider.
*/
public static boolean isExternalStorageDocument(Uri uri) {
return "com.android.externalstorage.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is DownloadsProvider.
*/
public static boolean isDownloadsDocument(Uri uri) {
return "com.android.providers.downloads.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is MediaProvider.
*/
public static boolean isMediaDocument(Uri uri) {
return "com.android.providers.media.documents".equals(uri.getAuthority());
}
/**
* @param uri The Uri to check.
* @return Whether the Uri authority is Google Photos.
*/
public static boolean isGooglePhotosUri(Uri uri) {
return "com.google.android.apps.photos.content".equals(uri.getAuthority());
}
} | apache-2.0 |
Ray1184/JRD3_Project | JRD3_Tools/src/main/java/org/jrd3/editor/Editor.java | 10295 | package org.jrd3.editor;
import org.joml.Vector3f;
import org.jrd3.editor.gui.*;
import org.jrd3.engine.playenv.interaction.CamView;
import org.jrd3.engine.playenv.interaction.map.PathsMap;
import org.jrd3.engine.playenv.interaction.map.Sector;
import org.jrd3.engine.playenv.interaction.map.ViewMap;
import org.jrd3.engine.playenv.interaction.map.WalkMap;
import processing.core.PVector;
import javax.swing.*;
import java.io.*;
import java.util.ArrayList;
/**
* Main editor.
*
* @author Ray1184
* @version 1.0
*/
public class Editor extends Window {
private Button load, save, newSector, exit, confirm;
private InputText x, y, z, rx, ry, rz, newSectorGroupName;
private ListItem<Sector> sectorGroupSelection;
private GUIElement camSettings;
private InteractiveMap interactiveMap;
public static final String DK_VIEW_X = "V_X";
public static final String DK_VIEW_Y = "V_Y";
public static final String DK_VIEW_Z = "V_Z";
public static final String DK_VIEW_RX = "V_RX";
public static final String DK_VIEW_RY = "V_RY";
public static final String DK_VIEW_RZ = "V_RZ";
public static final PVector OK_MAIN_COLOR = new PVector(200, 52, 0);
public static final PVector OK_BACK_COLOR = new PVector(64, 8, 0);
public static final PVector OK_LABEL_COLOR = new PVector(200, 200, 200);
private boolean refreshInputText;
@Override
public void setup() {
super.setup();
interactiveMap = new InteractiveMap("WM_MAP", this, 440, 175, 115, 45, null, true);
camSettings = new GUIElement("SETTINGS_G", this, 8, 8, 420, 790, null, false);
newSector = new Button("NEW_BT", this, 740, 93, 170, 75, "New", true, Editor.OK_MAIN_COLOR, Editor.OK_BACK_COLOR, Editor.OK_LABEL_COLOR);
newSector.setActionListener(args -> {
String sgName = newSectorGroupName.getText();
if (sgName != null && !sgName.isEmpty()) {
sectorGroupSelection.addElement(sgName, new ArrayList<>());
}
newSectorGroupName.setText("");
refreshInputText = true;
});
newSectorGroupName = new InputText("NEW_TX", this, 435, 102, 295, 52, null, false, 6);
save = new Button("SAVE_BT", this, 740, 8, 170, 75, "Save", true);
save.setActionListener(args -> {
if (interactiveMap.getWalkMap() != null) {
selectOutput("Save map", "onMapSave");
}
});
load = new Button("LOAD_BT", this, 920, 8, 170, 75, "Load", true);
load.setActionListener(args -> selectInput("Select map (*.jrd3m, *.objw, *objv, *.objp)", "onMapSelect"));
exit = new Button("EXIT_BT", this, 1100, 8, 170, 75, "Exit", true);
exit.setActionListener(args -> exit());
confirm = new Button("CONFIRM_BT", this, 435, 8, 295, 75, "Confirm", true, Editor.OK_MAIN_COLOR, Editor.OK_BACK_COLOR, Editor.OK_LABEL_COLOR);
confirm.setActionListener(args -> {
String sgName = sectorGroupSelection.getSelectedLabel();
if (sgName != null && !sgName.isEmpty()) {
if (interactiveMap.getSelectedSectors() != null && interactiveMap.getSelectedSectors().size() > 0) {
sectorGroupSelection.addElement(sgName, new ArrayList<>(interactiveMap.getSelectedSectors()));
interactiveMap.getSelectedSectors().clear();
}
for (Sector s : sectorGroupSelection.getSelectedElement()) {
s.getSectorData().setString(Editor.DK_VIEW_X, x.getText());
s.getSectorData().setString(Editor.DK_VIEW_Y, y.getText());
s.getSectorData().setString(Editor.DK_VIEW_Z, z.getText());
s.getSectorData().setString(Editor.DK_VIEW_RX, rx.getText());
s.getSectorData().setString(Editor.DK_VIEW_RY, ry.getText());
s.getSectorData().setString(Editor.DK_VIEW_RZ, rz.getText());
}
refreshInputText = true;
}
});
sectorGroupSelection = new ListItem<>("SG_SEL", this, 40, 30, 360, 52, "SECTORS MAP", false);
x = new InputText("X_TX", this, 40, 140, 360, 52, "X AXIS", false, 8, true);
y = new InputText("Y_TX", this, 40, 250, 360, 52, "Y AXIS", false, 8, true);
z = new InputText("Z_TX", this, 40, 360, 360, 52, "Z AXIS", false, 8, true);
rx = new InputText("RX_TX", this, 40, 465, 360, 52, "X ROTATION", false, 8, true);
ry = new InputText("RY_TX", this, 40, 570, 360, 52, "Y ROTATION", false, 8, true);
rz = new InputText("RZ_TX", this, 40, 675, 360, 52, "Z ROTATION", false, 8, true);
}
@Override
protected void update() {
background(0);
pushMatrix();
fill(15, 15, 15);
rect(440, 180, 835, 615);
fill(30, 30, 30);
rect(435, 175, 835, 615);
popMatrix();
if (refreshInputText || sectorGroupSelection.changed()) {
if (sectorGroupSelection.getSelectedElement() != null && sectorGroupSelection.getSelectedElement().size() > 0) {
Sector s = sectorGroupSelection.getSelectedElement().get(0);
if (s != null) {
x.setText(s.getSectorData().getString(Editor.DK_VIEW_X));
y.setText(s.getSectorData().getString(Editor.DK_VIEW_Y));
z.setText(s.getSectorData().getString(Editor.DK_VIEW_Z));
rx.setText(s.getSectorData().getString(Editor.DK_VIEW_RX));
ry.setText(s.getSectorData().getString(Editor.DK_VIEW_RY));
rz.setText(s.getSectorData().getString(Editor.DK_VIEW_RZ));
}
} else {
x.setText("");
y.setText("");
z.setText("");
rx.setText("");
ry.setText("");
rz.setText("");
}
refreshInputText = false;
}
}
// CALLBACKS
public void onMapSelect(File selection) {
try {
if (selection != null) {
WalkMap walkMap;
sectorGroupSelection.reset();
refreshInputText = true;
if (selection.getAbsolutePath().endsWith(".objw")) {
walkMap = new WalkMap(selection.getAbsolutePath());
int i = 0;
for (Sector s : walkMap.getSectors()) {
String name = "S" + i++;
s.setName(name);
walkMap.getSectorByName().put(name, s);
}
} else if (selection.getAbsolutePath().endsWith(".objv")) {
walkMap = new ViewMap(selection.getAbsolutePath());
} else if (selection.getAbsolutePath().endsWith(".objp")) {
walkMap = new PathsMap(selection.getAbsolutePath());
} else if (selection.getAbsolutePath().endsWith(".jrd3m")) {
InputStream file = new FileInputStream(selection.getAbsolutePath());
InputStream buffer = new BufferedInputStream(file);
ObjectInput input = new ObjectInputStream(buffer);
Object res = input.readObject();
if (res instanceof ViewMap) {
walkMap = (ViewMap) res;
sectorGroupSelection.setElements(((ViewMap) walkMap).getElements());
refreshInputText = true;
} else if (res instanceof PathsMap) {
walkMap = (PathsMap) res;
} else {
walkMap = (WalkMap) res;
}
} else {
JOptionPane.showMessageDialog(null, "Only .obj-w/v/p and .jrd3m files are allowed.");
return;
}
if (walkMap != null) {
interactiveMap.setWalkMap(walkMap);
interactiveMap.setFactor(100);
}
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(null, "Error loading walkmap:\n" + e.getMessage());
}
}
public void onMapSave(File selection) {
try {
if (selection != null) {
if (interactiveMap.getWalkMap() instanceof ViewMap) {
sectorGroupSelection.getElements().forEach((sName, sGroup) -> {
for (Sector s : sGroup) {
Vector3f pos = new Vector3f(Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_X)), Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_Y)), Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_Z)));
Vector3f rot = new Vector3f(Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_RX)), Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_RY)), Float.parseFloat(s.getSectorData().getString(Editor.DK_VIEW_RZ)));
String back = sName + "_B";
String depth = sName + "_D";
CamView camView = new CamView(pos, rot, back, depth);
((ViewMap) interactiveMap.getWalkMap()).addView(s, camView);
}
});
((ViewMap) interactiveMap.getWalkMap()).setElements(sectorGroupSelection.getElements());
}
FileOutputStream fos;
ObjectOutputStream out;
try {
fos = new FileOutputStream(selection);
out = new ObjectOutputStream(fos);
out.writeObject(interactiveMap.getWalkMap());
out.close();
} catch (Exception ex) {
JOptionPane.showMessageDialog(null, "Cannot save " + selection.getAbsolutePath() + " file.");
ex.printStackTrace();
}
}
} catch (Exception e) {
e.printStackTrace();
JOptionPane.showMessageDialog(null, "Error saving walkmap:\n" + e.getMessage());
}
}
}
| apache-2.0 |
sontx/da-cnpm | CinemaProject/src/com/dutproject/cinemaproject/controller/LoginFormServlet.java | 876 | package com.dutproject.cinemaproject.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.dutproject.cinemaproject.model.bean.Account;
/**
 * Servlet that renders the login form for anonymous visitors; any already
 * authenticated user is redirected to the home page instead.
 */
@WebServlet(name = "Login", urlPatterns = { "/Login" })
public class LoginFormServlet extends LoginBaseServlet {

    private static final long serialVersionUID = 1L;

    @Override
    protected void doWork(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // Authenticated users never see the login form.
        if (getPermission(request) != Account.NO_PERMISSION) {
            navigateToHome(request, response);
            return;
        }
        request.getServletContext().getRequestDispatcher("/jsp/loginForm.jsp").forward(request, response);
    }
}
| apache-2.0 |
gbif/parsers | src/main/java/org/gbif/common/parsers/utils/ClassificationUtils.java | 6512 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.common.parsers.utils;
import org.gbif.api.exception.UnparsableException;
import org.gbif.api.model.checklistbank.ParsedName;
import org.gbif.api.vocabulary.Rank;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import static org.gbif.common.parsers.utils.NameParserUtils.PARSER;
/**
 * Utilities to work on classifications: canonical-name parsing and cleanup of
 * raw taxon/author strings.
 */
public final class ClassificationUtils {

    // used to clean up bad characters
    private static final Pattern CLEAN_REG_EX = Pattern.compile("[{}§';_|$%!?]+");

    // common null strings to ignore for fast performance.
    // Less frequent ones are kept in the blacklisted names dictionary!
    public static final Set<String> NULL_STRINGS =
        new HashSet<String>(Arrays.asList("/N", "\\", "\\\\", "\\N", "\\\\N", "null", "NULL", "Null"));

    private ClassificationUtils() {
        throw new UnsupportedOperationException("Can't initialize class");
    }

    /**
     * Parses a canonical name at a specific Rank.
     *
     * @param scientificName the raw scientific name
     * @param rank           the rank to parse at, may be null
     * @return the canonical name with rank marker, or null if the name is unparsable
     */
    public static String canonicalName(String scientificName, Rank rank) {
        try {
            return PARSER.parse(scientificName, rank).canonicalNameWithMarker();
        } catch (UnparsableException e) {
            // FIX: the original swallowed this exception and then dereferenced
            // the still-null ParsedName, so every unparsable name surfaced as a
            // NullPointerException. Returning null makes the failure explicit.
            return null;
        }
    }

    /**
     * Parses a scientific name and creates the canonical name including a potential hybrid and rank marker
     * plus the cultivar and strain names if existing.
     * Note: This method once used to only include the hybrid marker - if that is still needed revert to buildName
     * method.
     *
     * @return the canonical name, or null if the name is unparsable
     */
    public static String canonicalName(String scientificName) {
        return canonicalName(scientificName, null);
    }

    /**
     * Cleans up a taxon as far as possible by removing erroneous chars etc.
     * This does not do any parsing.
     *
     * @param taxon to check
     * @return the cleaned taxon, or null if empty, a known null string, or blacklisted
     */
    public static String clean(String taxon) {
        if (StringUtils.isEmpty(taxon) || NULL_STRINGS.contains(taxon)) {
            return null;
        }
        String cleanedTaxon = taxon;
        // if it is a single word and ALL "UPPERCASE", turn it into a Capitalised word
        // Note: if we lowercase names with multiple words we might accidently create valid looking names by lowercasing the
        // author
        // for example ABIES ALBA REMSEN will become an Abies alba remsen which will then be interpreted badly
        // ABIES ALBA LINNEAUS 1771 will even be Abies alba linneaus 1771, a perfectly formed zoological name
        if (!cleanedTaxon.contains(" ") && cleanedTaxon.equals(cleanedTaxon.toUpperCase())) {
            cleanedTaxon = cleanedTaxon.substring(0, 1) + cleanedTaxon.substring(1).toLowerCase();
        }
        // remove the " from names with it at the beginning and end
        while (cleanedTaxon.length() > 0 && cleanedTaxon.charAt(0) == '\"' && cleanedTaxon.charAt(cleanedTaxon.length() - 1) == '\"') {
            if (cleanedTaxon.length() == 1) {
                return null;
            }
            cleanedTaxon = cleanedTaxon.substring(1, cleanedTaxon.length() - 1);
        }
        // remove the " from names with it just at the beginning
        while (cleanedTaxon.length() > 0 && cleanedTaxon.charAt(0) == '\"') {
            if (cleanedTaxon.length() == 1) {
                return null;
            }
            cleanedTaxon = cleanedTaxon.substring(1);
        }
        // remove the " from names with it just at the end
        while (cleanedTaxon.length() > 0 && cleanedTaxon.charAt(cleanedTaxon.length() - 1) == '\"') {
            if (cleanedTaxon.length() == 1) {
                return null;
            }
            cleanedTaxon = cleanedTaxon.substring(0, cleanedTaxon.length() - 1);
        }
        // remove noise
        cleanedTaxon = CLEAN_REG_EX.matcher(cleanedTaxon).replaceAll("");
        cleanedTaxon = cleanedTaxon.trim();
        // don't let any blacklisted names through
        if (BlacklistedNames.contains(cleanedTaxon.toUpperCase()) || (!cleanedTaxon.equals(taxon) && BlacklistedNames
            .contains(taxon.toUpperCase()))) {
            // blacklisted name
            return null;
        }
        return StringUtils.trimToNull(cleanedTaxon);
    }

    /**
     * Clean some noise from the author. A large proportion are "\N" for example.
     *
     * @param author to clean
     * @return cleaned author, or null if empty or a known null string
     */
    public static String cleanAuthor(String author) {
        if (StringUtils.isEmpty(author) || NULL_STRINGS.contains(author)) {
            return null;
        }
        String cleanedAuthor = author;
        // remove the " from names with it at the beginning and end
        while (cleanedAuthor.length() > 0 && cleanedAuthor.charAt(0) == '\"' && cleanedAuthor.charAt(cleanedAuthor.length() - 1) == '\"') {
            if (cleanedAuthor.length() == 1) {
                return null;
            }
            cleanedAuthor = cleanedAuthor.substring(1, cleanedAuthor.length() - 1);
        }
        // remove noise
        cleanedAuthor = CLEAN_REG_EX.matcher(cleanedAuthor).replaceAll("");
        cleanedAuthor = cleanedAuthor.trim();
        return StringUtils.trimToNull(cleanedAuthor);
    }

    /**
     * Parses a scientific name without knowing its Rank.
     * @deprecated unsufficiently documented; should be in NameParser project if it is needed at all
     */
    @Deprecated
    public static String parseName(String scientificName) {
        return parseName(scientificName, null);
    }

    /**
     * Parses a scientific name of a specific rank.
     * @deprecated unsufficiently documented; should be in NameParser project if it is needed at all
     */
    @Deprecated
    public static String parseName(String scientificName, Rank rank) {
        try {
            ParsedName pn = PARSER.parse(scientificName, rank);
            // Handle Aus sp. and Aus bus spp.
            if (pn.isIndetermined()) {
                pn.setRank(null);
            }
            return pn.fullName();
        } catch (UnparsableException e) {
            // fall through to the canonical-only attempt below
        }
        // looks dirty, so try and normalize it as best we can and get a canonical at least
        String canon = PARSER.parseToCanonical(scientificName, rank);
        if (canon != null) {
            return canon;
        }
        return scientificName;
    }
}
| apache-2.0 |
pousse-cafe/pousse-cafe | pousse-cafe-core/src/main/java/poussecafe/attribute/entity/SingleEntityAttributeBuilder.java | 1872 | package poussecafe.attribute.entity;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Supplier;
import poussecafe.domain.Entity;
import poussecafe.domain.EntityAttributes;
/**
 * Step-builder for a single-entity attribute: the caller supplies a getter for
 * the stored entity data ({@link ReadOnlyAttribute#write(Consumer)} then adds
 * the setter), and {@link ReadWriteAttribute#build()} produces the final
 * {@link EntityAttribute}. The nested classes enforce read-then-write ordering
 * at compile time.
 *
 * @param <D> the entity's attribute/data type
 * @param <E> the entity type
 * @param <F> the concrete stored data type, a subtype of {@code D}
 */
public class SingleEntityAttributeBuilder<D extends EntityAttributes<?>, E extends Entity<?, D>, F extends D> {

    // Package-private: instances are created by the surrounding builder API.
    SingleEntityAttributeBuilder() {

    }

    Class<E> entityClass;

    Class<F> dataClass;

    /** Registers the data getter and moves to the read-only step. */
    public ReadOnlyAttribute read(Supplier<F> getter) {
        Objects.requireNonNull(getter);
        this.getter = getter;
        return new ReadOnlyAttribute();
    }

    private Supplier<F> getter;

    /** Intermediate step: a getter is set, a setter is still required. */
    public class ReadOnlyAttribute {

        private ReadOnlyAttribute() {

        }

        /** Registers the data setter and moves to the final build step. */
        public ReadWriteAttribute write(Consumer<F> setter) {
            Objects.requireNonNull(setter);
            SingleEntityAttributeBuilder.this.setter = setter;
            return new ReadWriteAttribute();
        }
    }

    private Consumer<F> setter;

    /** Final step: both accessors are set and the attribute can be built. */
    public class ReadWriteAttribute {

        private ReadWriteAttribute() {

        }

        public EntityAttribute<E> build() {
            // Anonymous adapter bridging the registered getter/setter to the
            // EntityAttributeData contract.
            return new EntityAttributeData<>(entityClass) {
                @Override
                protected D getData() {
                    D storedData = getter.get();
                    Objects.requireNonNull(storedData, "Stored Entity data cannot be null");
                    return storedData;
                }

                @SuppressWarnings("unchecked")
                @Override
                protected void setData(D data) {
                    Objects.requireNonNull(data, "Cannot set null Entity data");
                    // Unchecked cast: callers are expected to pass the concrete
                    // stored type F; enforced only by the builder's generics.
                    setter.accept((F) data);
                }

                @Override
                // NOTE(review): raw Class return type — presumably matches the
                // raw signature declared in EntityAttributeData; confirm before
                // narrowing to Class<E>.
                public Class entityClass() {
                    return entityClass;
                }
            };
        }
    }
}
| apache-2.0 |
akiellor/selenium | java/server/test/org/openqa/grid/e2e/misc/Grid1HeartbeatTests.java | 2868 | package org.openqa.grid.e2e.misc;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequest;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.openqa.grid.e2e.utils.GridConfigurationMock;
import org.openqa.grid.e2e.utils.RegistryTestHelper;
import org.openqa.grid.internal.utils.GridHubConfiguration;
import org.openqa.grid.selenium.SelfRegisteringRemote;
import org.openqa.grid.web.Hub;
import org.openqa.selenium.net.PortProber;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
public class Grid1HeartbeatTests {
private Hub hub;
@BeforeClass(alwaysRun = true)
public void setup() throws Exception {
GridHubConfiguration config = new GridHubConfiguration();
config.setPort(PortProber.findFreePort());
hub = new Hub(config);
hub.start();
}
@Test
public void testIsNotRegistered() throws Exception {
// Send the heartbeat request when we know that there are no nodes
// registered with the hub.
URL heartbeatUrl = new URL(String.format("http://%s:%s/heartbeat?host=localhost&port=5000", hub.getHost(), hub.getPort()));
HttpRequest request = new HttpGet(heartbeatUrl.toString());
DefaultHttpClient client = new DefaultHttpClient();
HttpHost host = new HttpHost(hub.getHost(), hub.getPort());
HttpResponse response = client.execute(host, request);
BufferedReader body = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));
Assert.assertEquals(response.getStatusLine().getStatusCode(), 200);
Assert.assertEquals(body.readLine(), "Hub : Not Registered");
}
@Test
public void testIsRegistered() throws Exception {
// register a selenium 1
SelfRegisteringRemote selenium1 = SelfRegisteringRemote.create(GridConfigurationMock.seleniumConfig(hub.getRegistrationURL()));
selenium1.launchRemoteServer();
selenium1.registerToHub();
RegistryTestHelper.waitForNode(hub.getRegistry(), 1);
// Check that the node is registered with the hub.
URL heartbeatUrl = new URL(String.format("http://%s:%s/heartbeat?host=%s&port=%s", hub.getHost(), hub.getPort(), selenium1.getGridConfig()
.getHost(), selenium1.getGridConfig().getNodeRemoteControlConfiguration().getPort()));
HttpRequest request = new HttpGet(heartbeatUrl.toString());
DefaultHttpClient client = new DefaultHttpClient();
HttpHost host = new HttpHost(hub.getHost(), hub.getPort());
HttpResponse response = client.execute(host, request);
BufferedReader body = new BufferedReader(new InputStreamReader(response.getEntity().getContent()));
Assert.assertEquals(response.getStatusLine().getStatusCode(), 200);
Assert.assertEquals(body.readLine(), "Hub : OK");
}
}
| apache-2.0 |
rlon008/testamation | testamation-core/src/main/java/nz/co/testamation/core/reader/SelectRows.java | 1651 | /*
* Copyright 2016 Ratha Long
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.co.testamation.core.reader;
import com.google.common.collect.Lists;
import org.springframework.jdbc.core.JdbcTemplate;
public class SelectRows implements SelectQuery<SQLResult> {
private static final String SELECT_SQL_TEMPLATE = "select * from %s %s";
private final String tableName;
private WhereClauseBuilder where = new WhereClauseBuilder();
public SelectRows( String tableName ) {
this.tableName = tableName;
}
public SelectRows where( String column, String value ) {
where.and( column, value );
return this;
}
@Override
public SQLResult select( JdbcTemplate jdbcTemplate ) {
return new SQLResult(
Lists.transform( jdbcTemplate.queryForList( getSql() ), TableRowImpl::new )
);
}
protected String getSql() {
return String.format( SELECT_SQL_TEMPLATE, tableName, where.toString() );
}
@Override
public String toString() {
return String.format( "SelectRows{table=%s, query=%s}", tableName, getSql() );
}
} | apache-2.0 |
vergilchiu/hive | beeline/src/java/org/apache/hive/beeline/IncrementalRowsWithNormalization.java | 2991 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This source file is based on code taken from SQLLine 1.0.2
* See SQLLine notice in LICENSE
*/
package org.apache.hive.beeline;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.NoSuchElementException;
import com.google.common.base.Optional;
/**
 * Extension of {@link IncrementalRows} which buffers "x" number of rows in memory at a time. It
 * uses the {@link BufferedRows} class to do its buffering. The value of "x" is determined by the
 * Beeline option <code>--incrementalBufferRows</code>, which defaults to
 * {@link BeeLineOpts#DEFAULT_INCREMENTAL_BUFFER_ROWS}. Once the initial set of rows are buffered, it
 * will allow the {@link #next()} method to drain the buffer. Once the buffer is empty the next
 * buffer will be fetched until the {@link ResultSet} is empty. The width of the rows are normalized
 * within each buffer using the {@link BufferedRows#normalizeWidths()} method.
 */
public class IncrementalRowsWithNormalization extends IncrementalRows {

  // Number of rows buffered per fetch, taken from --incrementalBufferRows.
  private final int incrementalBufferRows;

  // Current in-memory buffer; replaced when drained.
  private BufferedRows buffer;

  IncrementalRowsWithNormalization(BeeLine beeLine, ResultSet rs) throws SQLException {
    super(beeLine, rs);
    this.incrementalBufferRows = beeLine.getOpts().getIncrementalBufferRows();
    this.buffer = new BufferedRows(beeLine, rs, Optional.of(this.incrementalBufferRows));
    this.buffer.normalizeWidths();
  }

  @Override
  public boolean hasNext() {
    try {
      if (this.buffer.hasNext()) {
        return true;
      } else {
        // Current buffer drained: fetch the next batch from the ResultSet.
        this.buffer = new BufferedRows(this.beeLine, this.rs,
            Optional.of(this.incrementalBufferRows));
        if (this.normalizingWidths) {
          this.buffer.normalizeWidths();
        }
        // Drain the first Row, which just contains column names
        if (!this.buffer.hasNext()) {
          return false;
        }
        this.buffer.next();
        return this.buffer.hasNext();
      }
    } catch (SQLException ex) {
      // FIX: keep the original message but preserve the SQLException as the
      // cause (the original threw new RuntimeException(ex.toString()),
      // discarding the stack trace of the underlying failure).
      throw new RuntimeException(ex.toString(), ex);
    }
  }

  @Override
  public Object next() {
    if (!hasNext()) {
      throw new NoSuchElementException();
    }
    return this.buffer.next();
  }
}
| apache-2.0 |