gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package org.woehlke.btw17.kandidaten.oodm.model; import lombok.EqualsAndHashCode; import lombok.Getter; import lombok.Setter; import lombok.ToString; import org.hibernate.validator.constraints.URL; import org.woehlke.btw17.kandidaten.oodm.model.listener.ParteiListener; import org.woehlke.btw17.kandidaten.oodm.model.parts.*; import javax.persistence.*; import javax.validation.Valid; /** * @see org.woehlke.btw17.kandidaten.oodm.model.Kandidat */ @Getter @Setter @ToString @EqualsAndHashCode @Entity @Table( name = "partei", uniqueConstraints = { @UniqueConstraint(name="unique_partei", columnNames = {"partei","partei_lang"}) }, indexes = { @Index(name = "idx_partei_bundeszentrale_politische_bildung", columnList = "bundeszentrale_politische_bildung"), @Index(name = "idx_partei_wahlprogramm", columnList = "wahlprogramm"), @Index(name = "idx_partei_parteiprogramm", columnList = "parteiprogramm"), // @Index(name = "idx_partei_common_fields", columnList = "logo_url,symbol_bild"), // @Index(name = "idx_partei_webseite", columnList = "webseite"), // @Index(name = "idx_partei_geo_position", columnList = "google_maps_url,geo_longitude,geo_lattitude,geo_lattitude,geo_zoom"), // @Index(name = "idx_partei_adresse", columnList = "strasse,hausnummer,plz,ort,nation"), // @Index(name = "idx_partei_twitter", columnList = "twitter"), @Index(name = "idx_partei_facebook", columnList = "facebook"), @Index(name = "idx_partei_youtube", columnList = "youtube"), @Index(name = "idx_partei_logo_url", columnList = "logo_url"), @Index(name = "idx_partei_lobbypedia_url", columnList = "lobbypedia_url"), @Index(name = "idx_partei_wikipedia_article", columnList = "wikipedia_article") } ) @NamedQueries({ @NamedQuery( name = "Partei.getAllIds", query = "select o.id from Partei as o order by o.partei" ), @NamedQuery( name = "Partei.getMaxId", query = "select max(o.id) from Partei as o" ) }) @EntityListeners(ParteiListener.class) public class Partei implements 
DomainObject,WebseiteEmbedded,OnlineStrategieEmbedded,CommonFieldsEmbedded,GeoPositionEmbedded,AdresseEmbedded { private static final long serialVersionUID = 1L; @Id @GeneratedValue(strategy = GenerationType.AUTO) @Column(name = "id", updatable = false, nullable = false) protected Long id; @Column(name="partei") private String partei; @Column(name="partei_lang") private String parteiLang; @URL @Column(name="bundeszentrale_politische_bildung") private String bundeszentralePolitischeBildung; @URL @Column(name="wahlprogramm") private String wahlprogramm; @URL @Column(name="parteiprogramm") private String parteiprogramm; @Valid @Embedded private OnlineStrategie onlineStrategie = new OnlineStrategie(); @Valid @Embedded private GeoPosition geoPosition = new GeoPosition(); @Valid @Embedded private Adresse adresse = new Adresse(); @Valid @Embedded private CommonFields commonFields = new CommonFields(); @Valid @Embedded @AssociationOverrides({ @AssociationOverride( name = "agenturen", joinTable = @JoinTable( name = "partei_agentur" ) ) }) private Webseite webseite = new Webseite(); @Transient @Override public String getName() { StringBuilder sb = new StringBuilder(); sb.append(partei); if(partei.compareTo(parteiLang)!=0){ sb.append(" - "); sb.append(parteiLang); } return sb.toString(); } @Transient @Override public String getUniqueId() { return id + ":"+this.getName(); } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getPartei() { return partei; } public void setPartei(String partei) { this.partei = partei; } public String getParteiLang() { return parteiLang; } public void setParteiLang(String parteiLang) { this.parteiLang = parteiLang; } public String getBundeszentralePolitischeBildung() { return bundeszentralePolitischeBildung; } public void setBundeszentralePolitischeBildung(String bundeszentralePolitischeBildung) { this.bundeszentralePolitischeBildung = bundeszentralePolitischeBildung; } public OnlineStrategie 
getOnlineStrategie() { return onlineStrategie; } public void setOnlineStrategie(OnlineStrategie onlineStrategie) { if(onlineStrategie != null){ this.onlineStrategie = onlineStrategie; } } public CommonFields getCommonFields() { return commonFields; } public void setCommonFields(CommonFields commonFields) { if(commonFields != null){ this.commonFields = commonFields; } } public String getWahlprogramm() { return wahlprogramm; } public void setWahlprogramm(String wahlprogramm) { this.wahlprogramm = wahlprogramm; } public String getParteiprogramm() { return parteiprogramm; } public void setParteiprogramm(String parteiprogramm) { this.parteiprogramm = parteiprogramm; } @Override public GeoPosition getGeoPosition() { return geoPosition; } @Override public void setGeoPosition(GeoPosition geoPosition) { if(geoPosition != null){ this.geoPosition = geoPosition; } } @Override public Adresse getAdresse() { return adresse; } @Override public void setAdresse(Adresse adresse) { if(adresse != null){ this.adresse = adresse; } } @Override public Webseite getWebseite() { return webseite; } @Override public void setWebseite(Webseite webseite) { if(webseite != null){ this.webseite = webseite; } } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof Partei)) return false; Partei partei1 = (Partei) o; if (id != null ? !id.equals(partei1.id) : partei1.id != null) return false; if (partei != null ? !partei.equals(partei1.partei) : partei1.partei != null) return false; if (parteiLang != null ? !parteiLang.equals(partei1.parteiLang) : partei1.parteiLang != null) return false; if (bundeszentralePolitischeBildung != null ? !bundeszentralePolitischeBildung.equals(partei1.bundeszentralePolitischeBildung) : partei1.bundeszentralePolitischeBildung != null) return false; if (wahlprogramm != null ? !wahlprogramm.equals(partei1.wahlprogramm) : partei1.wahlprogramm != null) return false; if (parteiprogramm != null ? 
!parteiprogramm.equals(partei1.parteiprogramm) : partei1.parteiprogramm != null) return false; if (onlineStrategie != null ? !onlineStrategie.equals(partei1.onlineStrategie) : partei1.onlineStrategie != null) return false; if (geoPosition != null ? !geoPosition.equals(partei1.geoPosition) : partei1.geoPosition != null) return false; if (adresse != null ? !adresse.equals(partei1.adresse) : partei1.adresse != null) return false; if (commonFields != null ? !commonFields.equals(partei1.commonFields) : partei1.commonFields != null) return false; return webseite != null ? webseite.equals(partei1.webseite) : partei1.webseite == null; } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (partei != null ? partei.hashCode() : 0); result = 31 * result + (parteiLang != null ? parteiLang.hashCode() : 0); result = 31 * result + (bundeszentralePolitischeBildung != null ? bundeszentralePolitischeBildung.hashCode() : 0); result = 31 * result + (wahlprogramm != null ? wahlprogramm.hashCode() : 0); result = 31 * result + (parteiprogramm != null ? parteiprogramm.hashCode() : 0); result = 31 * result + (onlineStrategie != null ? onlineStrategie.hashCode() : 0); result = 31 * result + (geoPosition != null ? geoPosition.hashCode() : 0); result = 31 * result + (adresse != null ? adresse.hashCode() : 0); result = 31 * result + (commonFields != null ? commonFields.hashCode() : 0); result = 31 * result + (webseite != null ? webseite.hashCode() : 0); return result; } @Override public String toString() { return "Partei{" + "id=" + id + ", partei='" + partei + '\'' + ", parteiLang='" + parteiLang + '\'' + ", bundeszentralePolitischeBildung='" + bundeszentralePolitischeBildung + '\'' + ", wahlprogramm='" + wahlprogramm + '\'' + ", parteiprogramm='" + parteiprogramm + '\'' + ", onlineStrategie=" + onlineStrategie + ", geoPosition=" + geoPosition + ", adresse=" + adresse + ", commonFields=" + commonFields + ", webseite=" + webseite + '}'; } }
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.move.moveInstanceMethod;

import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.generation.OverrideImplementUtil;
import com.intellij.ide.util.EditorHelper;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.javadoc.PsiDocTagValue;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.MethodSignature;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.move.MoveInstanceMembersUtil;
import com.intellij.refactoring.util.*;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.containers.MultiMap;
import com.siyeh.ig.psiutils.ExpressionUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;

import java.util.*;

/**
 * Refactoring processor for "Move Instance Method": moves {@code myMethod}
 * from its containing class onto the class of {@code myTargetVariable}
 * (a parameter of the method, or a field of the containing class).
 *
 * Pipeline (driven by {@link BaseRefactoringProcessor}):
 * find usages -> check conflicts -> rewrite call sites and the method body,
 * add the rewritten method to the target class, delete the original.
 *
 * @author ven
 */
public class MoveInstanceMethodProcessor extends BaseRefactoringProcessor{
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.move.moveInstanceMethod.MoveInstanceMethodProcessor");

  /** The method being moved. */
  public PsiMethod getMethod() {
    return myMethod;
  }

  /** The parameter or field whose type becomes the method's new home. */
  public PsiVariable getTargetVariable() {
    return myTargetVariable;
  }

  private PsiMethod myMethod;
  private PsiVariable myTargetVariable;
  private PsiClass myTargetClass;
  // Visibility to apply to the moved method; may be VisibilityUtil.ESCALATE_VISIBILITY.
  private final String myNewVisibility;
  private final boolean myOpenInEditor;
  // For each class whose instance context the moved body still needs,
  // the name of the parameter that will carry that instance after the move.
  private final Map<PsiClass, String> myOldClassParameterNames;

  public MoveInstanceMethodProcessor(final Project project,
                                     final PsiMethod method,
                                     final PsiVariable targetVariable,
                                     final String newVisibility,
                                     final Map<PsiClass, String> oldClassParameterNames) {
    this(project, method, targetVariable, newVisibility, false, oldClassParameterNames);
  }

  /**
   * @param targetVariable must be a PsiParameter or PsiField of class type
   *                       (asserted below); its type's class is the target.
   * @param openInEditor   when true, the moved method is opened in the editor
   *                       after the refactoring completes.
   */
  public MoveInstanceMethodProcessor(final Project project,
                                     final PsiMethod method,
                                     final PsiVariable targetVariable,
                                     final String newVisibility,
                                     boolean openInEditor,
                                     final Map<PsiClass, String> oldClassParameterNames) {
    super(project);
    myMethod = method;
    myTargetVariable = targetVariable;
    myOpenInEditor = openInEditor;
    myOldClassParameterNames = oldClassParameterNames;
    LOG.assertTrue(myTargetVariable instanceof PsiParameter || myTargetVariable instanceof PsiField);
    LOG.assertTrue(myTargetVariable.getType() instanceof PsiClassType);
    final PsiType type = myTargetVariable.getType();
    LOG.assertTrue(type instanceof PsiClassType);
    myTargetClass = ((PsiClassType) type).resolve();
    myNewVisibility = newVisibility;
  }

  @Override
  @NotNull
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    return new MoveInstanceMethodViewDescriptor(myMethod, myTargetVariable, myTargetClass);
  }

  /**
   * Collects conflicts before the refactoring runs: accessibility problems in
   * the target class (or, for pre-Java-8 interfaces, in each concrete
   * inheritor), call sites that pass a null literal for the target parameter,
   * method references that would have to be expanded to lambdas, and signature
   * clashes with existing methods of the target class.
   */
  @Override
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    final UsageInfo[] usages = refUsages.get();
    MultiMap<PsiElement, String> conflicts = new MultiMap<>();
    final Set<PsiMember> members = new HashSet<>();
    members.add(myMethod);
    if (myTargetVariable instanceof PsiField) members.add((PsiMember)myTargetVariable);
    if (!myTargetClass.isInterface()) {
      RefactoringConflictsUtil.analyzeAccessibilityConflicts(members, myTargetClass, conflicts, myNewVisibility);
    }
    else {
      // Interface target: the method body actually lands in each non-interface
      // inheritor (see findUsages/addInheritorUsages), so check those instead.
      for (final UsageInfo usage : usages) {
        if (usage instanceof InheritorUsageInfo) {
          RefactoringConflictsUtil.analyzeAccessibilityConflicts(
            members, ((InheritorUsageInfo)usage).getInheritor(), conflicts, myNewVisibility);
        }
      }
    }

    if (myTargetVariable instanceof PsiParameter) {
      PsiParameter parameter = (PsiParameter)myTargetVariable;
      final int index = myMethod.getParameterList().getParameterIndex(parameter);
      for (final UsageInfo usageInfo : usages) {
        if (usageInfo instanceof MethodCallUsageInfo) {
          final PsiElement methodCall = ((MethodCallUsageInfo)usageInfo).getMethodCallExpression();
          if (methodCall instanceof PsiMethodCallExpression) {
            final PsiExpression[] expressions = ((PsiMethodCallExpression)methodCall).getArgumentList().getExpressions();
            if (index < expressions.length) {
              PsiExpression instanceValue = expressions[index];
              instanceValue = RefactoringUtil.unparenthesizeExpression(instanceValue);
              // A null argument would become a null receiver after the move -> NPE.
              if (instanceValue instanceof PsiLiteralExpression && ((PsiLiteralExpression)instanceValue).getValue() == null) {
                String message = RefactoringBundle.message("0.contains.call.with.null.argument.for.parameter.1",
                                                           RefactoringUIUtil.getDescription(ConflictsUtil.getContainer(methodCall), true),
                                                           CommonRefactoringUtil.htmlEmphasize(parameter.getName()));
                conflicts.putValue(instanceValue, message);
              }
            }
          }
          else if (methodCall instanceof PsiMethodReferenceExpression && shouldBeExpandedToLambda((PsiMethodReferenceExpression)methodCall, index)) {
            conflicts.putValue(methodCall, RefactoringBundle.message("expand.method.reference.warning"));
          }
        }
      }
    }

    try {
      // Signature clash check against the target class; the IncorrectOperationException
      // from building the pattern method is deliberately ignored here — the
      // same construction is retried during performRefactoring.
      ConflictsUtil.checkMethodConflicts(myTargetClass, myMethod, getPatternMethod(), conflicts);
    }
    catch (IncorrectOperationException ignored) {}

    return showConflicts(conflicts, usages);
  }

  /**
   * If collapse by second search is possible, then it's possible not to expand.
   * Returns false when the method reference can keep its form after the move:
   * the target is the method's first parameter and the functional interface's
   * first parameter type is compatible, so the receiver can be taken from the
   * functional interface argument instead.
   */
  private boolean shouldBeExpandedToLambda(PsiMethodReferenceExpression referenceExpression, int index) {
    PsiClassType.ClassResolveResult resolveResult = PsiUtil.resolveGenericsClassInType(referenceExpression.getFunctionalInterfaceType());
    PsiMethod interfaceMethod = LambdaUtil.getFunctionalInterfaceMethod(resolveResult);
    if (interfaceMethod != null) {
      MethodSignature methodSignature = interfaceMethod.getSignature(LambdaUtil.getSubstitutor(interfaceMethod, resolveResult));
      if (index == 0 && methodSignature.getParameterTypes().length > 0 &&
          methodSignature.getParameterTypes()[0].isAssignableFrom(myMethod.getParameterList().getParameters()[0].getType())) {
        return false;
      }
    }
    return true;
  }

  /**
   * Finds everything the refactoring must rewrite: external references to the
   * method (calls and javadoc links), concrete inheritors of a pre-Java-8
   * interface target, and internal usages inside the method body ('this'-
   * qualified references and unqualified references to the target variable).
   */
  @Override
  @NotNull
  protected UsageInfo[] findUsages() {
    final PsiManager manager = myMethod.getManager();
    final GlobalSearchScope searchScope = GlobalSearchScope.allScope(manager.getProject());
    final List<UsageInfo> usages = new ArrayList<>();
    for (PsiReference ref : ReferencesSearch.search(myMethod, searchScope, false)) {
      final PsiElement element = ref.getElement();
      if (element instanceof PsiReferenceExpression) {
        boolean isInternal = PsiTreeUtil.isAncestor(myMethod, element, true);
        usages.add(new MethodCallUsageInfo((PsiReferenceExpression)element, isInternal));
      }
      else if (element instanceof PsiDocTagValue) {
        usages.add(new JavadocUsageInfo((PsiDocTagValue)element));
      }
      else {
        throw new UnknownReferenceTypeException(element.getLanguage());
      }
    }

    // Pre-Java-8 interfaces cannot hold the body: it must be copied to inheritors.
    if (myTargetClass.isInterface() && !PsiUtil.isLanguageLevel8OrHigher(myTargetClass)) {
      addInheritorUsages(myTargetClass, searchScope, usages);
    }

    final PsiCodeBlock body = myMethod.getBody();
    if (body != null) {
      body.accept(new JavaRecursiveElementWalkingVisitor() {
        @Override
        public void visitNewExpression(PsiNewExpression expression) {
          if (MoveInstanceMembersUtil.getClassReferencedByThis(expression) != null) {
            usages.add(new InternalUsageInfo(expression));
          }
          super.visitNewExpression(expression);
        }

        @Override
        public void visitReferenceExpression(PsiReferenceExpression expression) {
          if (MoveInstanceMembersUtil.getClassReferencedByThis(expression) != null) {
            usages.add(new InternalUsageInfo(expression));
          }
          else if (!expression.isQualified()) {
            final PsiElement resolved = expression.resolve();
            if (myTargetVariable.equals(resolved)) {
              usages.add(new InternalUsageInfo(expression));
            }
          }
          super.visitReferenceExpression(expression);
        }
      });
    }

    return usages.toArray(UsageInfo.EMPTY_ARRAY);
  }

  /** Recursively collects concrete (non-interface) inheritors of {@code aClass}. */
  private static void addInheritorUsages(PsiClass aClass, final GlobalSearchScope searchScope, final List<UsageInfo> usages) {
    for (PsiClass inheritor : ClassInheritorsSearch.search(aClass, searchScope, false).findAll()) {
      if (!inheritor.isInterface()) {
        usages.add(new InheritorUsageInfo(inheritor));
      }
      else {
        addInheritorUsages(inheritor, searchScope, usages);
      }
    }
  }

  @Override
  protected void refreshElements(@NotNull PsiElement[] elements) {
    LOG.assertTrue(elements.length == 3);
    myMethod = (PsiMethod) elements[0];
    myTargetVariable = (PsiVariable) elements[1];
    myTargetClass = (PsiClass) elements[2];
  }

  @Override
  @NotNull
  protected String getCommandName() {
    return RefactoringBundle.message("move.instance.method.command");
  }

  public PsiClass getTargetClass() {
    return myTargetClass;
  }

  /**
   * Performs the move: builds the adjusted method, fixes every external call
   * site (plain calls and method references), copies the method into pre-8
   * interface inheritors, installs it in the target class, deletes the
   * original, and rebinds javadoc references.
   */
  @Override
  protected void performRefactoring(@NotNull UsageInfo[] usages) {
    PsiMethod patternMethod = createMethodToAdd();
    final List<PsiReference> docRefs = new ArrayList<>();
    for (UsageInfo usage : usages) {
      if (usage instanceof InheritorUsageInfo) {
        final PsiClass inheritor = ((InheritorUsageInfo)usage).getInheritor();
        addMethodToClass(inheritor, patternMethod, true);
      }
      else if (usage instanceof MethodCallUsageInfo && !((MethodCallUsageInfo)usage).isInternal()) {
        final PsiElement expression = ((MethodCallUsageInfo)usage).getMethodCallExpression();
        if (expression instanceof PsiMethodCallExpression) {
          correctMethodCall((PsiMethodCallExpression)expression, false);
        }
        else if (expression instanceof PsiMethodReferenceExpression) {
          PsiMethodReferenceExpression methodReferenceExpression = (PsiMethodReferenceExpression)expression;
          PsiExpression qualifierExpression = methodReferenceExpression.getQualifierExpression();
          if (myTargetVariable instanceof PsiParameter &&
              shouldBeExpandedToLambda(methodReferenceExpression, myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable))) {
            // The reference cannot survive the move: expand it to a lambda and
            // fix the call inside the lambda body instead.
            PsiLambdaExpression lambdaExpression = LambdaRefactoringUtil.convertMethodReferenceToLambda(methodReferenceExpression, false, true);
            if (lambdaExpression != null) {
              List<PsiExpression> returnExpressions = LambdaUtil.getReturnExpressions(lambdaExpression);
              if (!returnExpressions.isEmpty()) {
                correctMethodCall((PsiMethodCallExpression)returnExpressions.get(0), false);
              }
            }
          }
          else {
            // Keep the method reference, but re-qualify it for the new home.
            String exprText;
            if (myTargetVariable instanceof PsiParameter ||
                qualifierExpression instanceof PsiReferenceExpression &&
                ((PsiReferenceExpression)qualifierExpression).resolve() == myMethod.getContainingClass()) {
              exprText = myTargetVariable.getType().getCanonicalText();
            }
            else if (qualifierExpression instanceof PsiReferenceExpression) {
              exprText = qualifierExpression.getText() + "." + myTargetVariable.getName();
            }
            else {
              exprText = myTargetVariable.getName();
            }
            PsiExpression newQualifier = JavaPsiFacade.getElementFactory(myProject).createExpressionFromText(exprText, null);
            ((PsiMethodReferenceExpression)expression).setQualifierExpression(newQualifier);
            JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(expression);
          }
        }
      }
      else if (usage instanceof JavadocUsageInfo) {
        // Rebind after the method has been added to the target class (below).
        docRefs.add(usage.getElement().getReference());
      }
    }

    try {
      if (myTargetClass.isInterface()) {
        final PsiModifierList modifierList = patternMethod.getModifierList();
        if (!PsiUtil.isLanguageLevel8OrHigher(myTargetClass)) {
          // Pre-Java-8: the interface gets an abstract declaration only.
          patternMethod.getBody().delete();
          modifierList.setModifierProperty(PsiModifier.DEFAULT, false);
        }
        else {
          modifierList.setModifierProperty(PsiModifier.DEFAULT, true);
        }
        RefactoringUtil.makeMethodAbstract(myTargetClass, patternMethod);
      }

      final PsiMethod method = addMethodToClass(myTargetClass, patternMethod, false);
      myMethod.delete();
      for (PsiReference reference : docRefs) {
        reference.bindToElement(method);
      }
      VisibilityUtil.fixVisibility(UsageViewUtil.toElements(usages), method, myNewVisibility);

      if (myOpenInEditor) {
        EditorHelper.openInEditor(method);
      }
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  /**
   * Rewrites a single call of the moved method so it is dispatched on the new
   * receiver: the argument at the target-parameter position (removed from the
   * argument list), or the target field. When the body still needs the old
   * instance, the appropriate 'this' expression is appended as an argument.
   *
   * @param isInternalCall true for calls inside the moved method's own body
   */
  private void correctMethodCall(final PsiMethodCallExpression expression, final boolean isInternalCall) {
    try {
      final PsiManager manager = myMethod.getManager();
      PsiReferenceExpression methodExpression = expression.getMethodExpression();
      if (!methodExpression.isReferenceTo(myMethod)) return;
      final PsiExpression oldQualifier = methodExpression.getQualifierExpression();
      PsiExpression newQualifier = null;
      final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(methodExpression);
      if (myTargetVariable instanceof PsiParameter) {
        // The argument at the target parameter's position becomes the receiver.
        final int index = myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable);
        final PsiExpression[] arguments = expression.getArgumentList().getExpressions();
        if (index < arguments.length) {
          newQualifier = (PsiExpression)arguments[index].copy();
          arguments[index].delete();
        }
      }
      else {
        // Target is a field: the field reference becomes the receiver.
        VisibilityUtil.escalateVisibility((PsiField)myTargetVariable, expression);
        String newQualifierName = myTargetVariable.getName();
        if (myTargetVariable instanceof PsiField && oldQualifier != null) {
          final PsiClass aClass = PsiUtil.resolveClassInClassTypeOnly(oldQualifier.getType());
          if (aClass == ((PsiField)myTargetVariable).getContainingClass()) {
            newQualifierName = oldQualifier.getText() + "." + newQualifierName;
          }
        }
        newQualifier = JavaPsiFacade.getElementFactory(manager.getProject()).createExpressionFromText(newQualifierName, null);
      }

      PsiExpression newArgument = null;

      if (classReferencedByThis != null) {
        @NonNls String thisArgumentText = null;
        if (manager.areElementsEquivalent(myMethod.getContainingClass(), classReferencedByThis)) {
          // Only pass 'this' if the moved method actually takes a parameter
          // for the old class instance.
          if (myOldClassParameterNames.containsKey(myMethod.getContainingClass())) {
            thisArgumentText = "this";
          }
        }
        else {
          final String name = classReferencedByThis.getName();
          if (name != null) {
            thisArgumentText = name + ".this";
          }
          else {
            thisArgumentText = "this";
          }
        }

        if (thisArgumentText != null) {
          newArgument = JavaPsiFacade.getElementFactory(manager.getProject()).createExpressionFromText(thisArgumentText, null);
        }
      }
      else {
        if (!isInternalCall && oldQualifier != null) {
          final PsiType type = oldQualifier.getType();
          if (type instanceof PsiClassType) {
            final PsiClass resolved = ((PsiClassType)type).resolve();
            if (resolved != null && getParameterNameToCreate(resolved) != null) {
              //replace is needed in case old qualifier is e.g. the same as field as target variable
              newArgument = replaceRefsToTargetVariable(oldQualifier);
            }
          }
        }
      }

      if (newArgument != null) {
        expression.getArgumentList().add(newArgument);
      }

      if (newQualifier != null) {
        if (newQualifier instanceof PsiThisExpression && ((PsiThisExpression)newQualifier).getQualifier() == null) {
          //Remove now redundant 'this' qualifier
          if (oldQualifier != null) oldQualifier.delete();
        }
        else {
          final PsiReferenceExpression refExpr = (PsiReferenceExpression)JavaPsiFacade.getElementFactory(manager.getProject())
            .createExpressionFromText("q." + myMethod.getName(), null);
          refExpr.getQualifierExpression().replace(newQualifier);
          methodExpression.replace(refExpr);
        }
      }
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
    }
  }

  /**
   * Replaces every reference to the target variable inside {@code expression}
   * with 'this' (the expression will live inside the moved method, where the
   * target variable has become the receiver); returns the rewritten expression.
   */
  private PsiExpression replaceRefsToTargetVariable(final PsiExpression expression) {
    final PsiManager manager = expression.getManager();
    if (ExpressionUtils.isReferenceTo(expression, myTargetVariable)) {
      return createThisExpr(manager);
    }

    expression.accept(new JavaRecursiveElementVisitor() {
      @Override
      public void visitReferenceExpression(PsiReferenceExpression expression) {
        super.visitReferenceExpression(expression);
        if (expression.isReferenceTo(myTargetVariable)) {
          try {
            expression.replace(createThisExpr(manager));
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
      }
    });

    return expression;
  }

  /** Builds a bare 'this' expression; returns null only if PSI creation fails. */
  private static PsiExpression createThisExpr(final PsiManager manager) {
    try {
      return JavaPsiFacade.getElementFactory(manager.getProject()).createExpressionFromText("this", null);
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return null;
    }
  }

  /**
   * Adds a copy of {@code patternMethod} to {@code aClass}, decodes the
   * context info encoded in createMethodToAdd, and optionally inserts
   * {@code @Override} (used when copying into interface inheritors).
   */
  private static PsiMethod addMethodToClass(final PsiClass aClass, final PsiMethod patternMethod, boolean canAddOverride) {
    try {
      final PsiMethod method = (PsiMethod)aClass.add(patternMethod);
      ChangeContextUtil.decodeContextInfo(method, null, null);
      if (canAddOverride && OverrideImplementUtil.isInsertOverride(method, aClass)) {
        method.getModifierList().addAnnotation(CommonClassNames.JAVA_LANG_OVERRIDE);
      }
      return method;
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return null;
    }
  }

  /**
   * Produces the method as it will appear in the target class: internal
   * references in the body are rewritten (target variable -> 'this',
   * old-class 'this' -> the new old-instance parameter), then the signature
   * is adjusted via getPatternMethod and javadoc @param tags are fixed.
   * On failure the original (unmodified) method is returned.
   */
  private PsiMethod createMethodToAdd () {
    ChangeContextUtil.encodeContextInfo(myMethod, true);
    try {
      final PsiManager manager = myMethod.getManager();
      JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject());
      final PsiElementFactory factory = facade.getElementFactory();

      //correct internal references
      final PsiCodeBlock body = myMethod.getBody();
      if (body != null) {
        // Replacements are collected first and applied after the traversal,
        // so the visitor never mutates the tree it is walking.
        final Map<PsiElement, PsiElement> replaceMap = new HashMap<>();
        body.accept(new JavaRecursiveElementVisitor() {
          @Override
          public void visitThisExpression(PsiThisExpression expression) {
            final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
            if (classReferencedByThis != null && !PsiTreeUtil.isAncestor(myMethod, classReferencedByThis, false)) {
              // 'this' of an outer/old class becomes the corresponding new parameter.
              final PsiElementFactory factory = JavaPsiFacade.getElementFactory(myProject);
              String paramName = getParameterNameToCreate(classReferencedByThis);
              try {
                final PsiExpression refExpression = factory.createExpressionFromText(paramName, null);
                replaceMap.put(expression, refExpression);
              }
              catch (IncorrectOperationException e) {
                LOG.error(e);
              }
            }
          }

          @Override
          public void visitReferenceExpression(PsiReferenceExpression expression) {
            try {
              final PsiExpression qualifier = expression.getQualifierExpression();
              final PsiElement resolved = expression.resolve();
              if (ExpressionUtils.isReferenceTo(qualifier, myTargetVariable)) {
                if (resolved instanceof PsiField) {
                  String fieldName = ((PsiField)resolved).getName();
                  LOG.assertTrue(fieldName != null);
                  // If dropping the qualifier would make the name collide with
                  // a parameter or local, qualify with 'this' instead.
                  for (PsiParameter parameter : myMethod.getParameterList().getParameters()) {
                    if (Comparing.strEqual(parameter.getName(), fieldName) ||
                        facade.getResolveHelper().resolveReferencedVariable(fieldName, expression) != null) {
                      qualifier.replace(factory.createExpressionFromText("this", null));
                      return;
                    }
                  }
                }
                if (expression instanceof PsiMethodReferenceExpression) {
                  // Method references need an explicit qualifier.
                  qualifier.replace(factory.createExpressionFromText("this", null));
                }
                else {
                  //Target is a field, replace target.m -> m
                  qualifier.delete();
                }
                return;
              }
              if (myTargetVariable.equals(resolved)) {
                // Bare reference to the target variable becomes 'this'
                // (qualified when it occurs inside a nested class).
                PsiThisExpression thisExpression = RefactoringChangeUtil.createThisExpression(manager,
                  PsiTreeUtil.isAncestor(myMethod, PsiTreeUtil.getParentOfType(expression, PsiClass.class), true) ? myTargetClass : null);
                replaceMap.put(expression, thisExpression);
                return;
              }
              else if (myMethod.equals(resolved)) {
                // Recursive self-reference: handled by visitMethodCallExpression.
              }
              else {
                PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
                if (classReferencedByThis != null) {
                  // Implicit old-class member access: qualify with the new parameter.
                  final String paramName = getParameterNameToCreate(classReferencedByThis);
                  if (paramName != null) {
                    PsiReferenceExpression newQualifier = (PsiReferenceExpression)factory.createExpressionFromText(paramName, null);
                    expression.setQualifierExpression(newQualifier);
                    return;
                  }
                }
              }
              super.visitReferenceExpression(expression);
            }
            catch (IncorrectOperationException e) {
              LOG.error(e);
            }
          }

          @Override
          public void visitNewExpression(PsiNewExpression expression) {
            try {
              final PsiExpression qualifier = expression.getQualifier();
              if (ExpressionUtils.isReferenceTo(qualifier, myTargetVariable)) {
                //Target is a field, replace target.new A() -> new A()
                qualifier.delete();
              }
              else {
                final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
                if (classReferencedByThis != null) {
                  // Inner-class instantiation tied to the old instance:
                  // qualify with the new old-instance parameter.
                  if (qualifier != null) qualifier.delete();
                  final String paramName = getParameterNameToCreate(classReferencedByThis);
                  final PsiExpression newExpression = factory.createExpressionFromText(paramName + "." + expression.getText(), null);
                  replaceMap.put(expression, newExpression);
                }
              }
              super.visitNewExpression(expression);
            }
            catch (IncorrectOperationException e) {
              LOG.error(e);
            }
          }

          @Override
          public void visitMethodCallExpression(PsiMethodCallExpression expression) {
            correctMethodCall(expression, true);
            super.visitMethodCallExpression(expression);
          }
        });
        for (PsiElement element : replaceMap.keySet()) {
          final PsiElement replacement = replaceMap.get(element);
          element.replace(replacement);
        }
      }

      final PsiMethod methodCopy = getPatternMethod();

      final List<PsiParameter> newParameters = Arrays.asList(methodCopy.getParameterList().getParameters());
      RefactoringUtil.fixJavadocsForParams(methodCopy, new HashSet<>(newParameters));
      return methodCopy;
    }
    catch (IncorrectOperationException e) {
      LOG.error(e);
      return myMethod;
    }
  }

  /**
   * Copies the method and adjusts its signature for the target class: applies
   * the new visibility (public for interface targets), removes the target
   * parameter when the target is a parameter, and appends the old-instance
   * parameters declared in myOldClassParameterNames.
   */
  private PsiMethod getPatternMethod() throws IncorrectOperationException {
    final PsiMethod methodCopy = (PsiMethod)myMethod.copy();
    String name = myTargetClass.isInterface() ? PsiModifier.PUBLIC :
                  !Comparing.strEqual(myNewVisibility, VisibilityUtil.ESCALATE_VISIBILITY) ? myNewVisibility : null;
    if (name != null) {
      PsiUtil.setModifierProperty(methodCopy, name, true);
    }
    if (myTargetVariable instanceof PsiParameter) {
      final int index = myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable);
      methodCopy.getParameterList().getParameters()[index].delete();
    }

    addParameters(JavaPsiFacade.getElementFactory(myProject), methodCopy, myTargetClass.isInterface());
    return methodCopy;
  }

  /**
   * Appends one parameter per entry of myOldClassParameterNames, each typed as
   * the old class; 'final' is stripped for interface targets.
   */
  private void addParameters(final PsiElementFactory factory,
                             final PsiMethod methodCopy,
                             final boolean isInterface) throws IncorrectOperationException {
    final Set<Map.Entry<PsiClass, String>> entries = myOldClassParameterNames.entrySet();
    for (final Map.Entry<PsiClass, String> entry : entries) {
      final PsiClassType type = factory.createType(entry.getKey());
      final PsiParameter parameter = factory.createParameter(entry.getValue(), type);
      if (isInterface) {
        PsiUtil.setModifierProperty(parameter, PsiModifier.FINAL, false);
      }
      methodCopy.getParameterList().add(parameter);
    }
  }

  /** Name of the parameter that will carry an instance of {@code aClass}, or null. */
  private String getParameterNameToCreate(@NotNull PsiClass aClass) {
    return myOldClassParameterNames.get(aClass);
  }
}
/*

   Derby - Class com.pivotal.gemfirexd.internal.impl.store.access.btree.index.B2IRowLocking3

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package com.pivotal.gemfirexd.internal.impl.store.access.btree.index;

import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.store.access.ConglomerateController;
import com.pivotal.gemfirexd.internal.iapi.store.access.TransactionController;
import com.pivotal.gemfirexd.internal.iapi.store.access.conglomerate.TransactionManager;
import com.pivotal.gemfirexd.internal.iapi.store.raw.FetchDescriptor;
import com.pivotal.gemfirexd.internal.iapi.store.raw.LockingPolicy;
import com.pivotal.gemfirexd.internal.iapi.store.raw.RecordHandle;
import com.pivotal.gemfirexd.internal.iapi.store.raw.Transaction;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.RowLocation;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.BTree;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.BTreeLockingPolicy;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.BTreeRowPosition;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.ControlRow;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.LeafControlRow;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.OpenBTree;
import com.pivotal.gemfirexd.internal.impl.store.access.btree.WaitError;

/**

Implements the jdbc serializable isolation level using row locks.
<p>
Holds read and write locks until end of transaction.
Obtains previous key locks to protect from phantom reads.

**/

class B2IRowLocking3 implements BTreeLockingPolicy
{

    /**************************************************************************
     * Private/Protected fields of This class:
     **************************************************************************
     */

    /**
     * The container id of the base container for this index.  Used to build
     * record handles to make lock calls on.
     **/
    protected ConglomerateController        base_cc;

    /**
     * The OpenBtree to use if we have to lock anything in the btree vs.
     * base row locking.
     **/
    protected OpenBTree                     open_btree;

    /**
     * The locking policy to use to get and release the scan locks.  We could
     * cache this somewhere better.
     **/
    private LockingPolicy                   scan_locking_policy;

    /**
     * The transaction to associate lock requests with.
     **/
    private Transaction                     rawtran;

    /**************************************************************************
     * Constructors for This class:
     **************************************************************************
     */

    B2IRowLocking3(
    Transaction             rawtran,
    int                     lock_level,
    LockingPolicy           locking_policy,
    ConglomerateController  base_cc,
    OpenBTree               open_btree)
    {
        this.rawtran             = rawtran;
        this.base_cc             = base_cc;
        this.open_btree          = open_btree;
        // Scan (page protection) locks are always taken at read-committed,
        // independent of the lock_level/locking_policy of the index itself.
        this.scan_locking_policy =
            rawtran.newLockingPolicy(
                LockingPolicy.MODE_RECORD,
                TransactionController.ISOLATION_READ_COMMITTED, true);
    }

    /**************************************************************************
     * Private methods of This class:
     **************************************************************************
     */

    /**
     * Request the scan lock (read or write) on the given record handle,
     * using the cached read-committed scan locking policy.
     *
     * @param rh        The record handle representing the scan position.
     * @param forUpdate If true request a write lock, else a read lock.
     * @param wait      Whether to wait for the lock or fail NOWAIT.
     *
     * @return true if the lock was granted.
     *
     * @exception StandardException Standard exception policy.
     **/
    private boolean _lockScan(
    RecordHandle    rh,
    boolean         forUpdate,
    boolean         wait)
        throws StandardException
    {
        boolean ret_val = true;

        // only get the scan lock if we are record locking.

        if (!forUpdate)
        {
            ret_val =
                scan_locking_policy.lockRecordForRead(
                    rawtran, open_btree.getContainerHandle(),
                    rh, wait, false);
        }
        else
        {
            ret_val =
                scan_locking_policy.lockRecordForWrite(
                    rawtran, rh, false, wait);
        }

        return(ret_val);
    }

    /**
     * Lock key previous to first key in btree.
     * <p>
     * In the previous key locking protocol repeatable read and phantom
     * protection is guaranteed by locking a range of keys in the btree.
     * The range is defined by the key previous to the first key you look
     * at and all subsequent keys you look at.  The first key in the index
     * is a special case, as there are no keys previous to it.  In that
     * case a special key is declared the "previous key" to the first key
     * in the btree and is locked instead.
     * <p>
     * In this implementation that first key is defined to be in the base
     * container, page ContainerHandle.FIRST_PAGE_NUMBER, record id
     * PREVIOUS_KEY_HANDLE.
     * <p>
     * Note that the previous key is the same for all indexes on a given
     * conglomerate.  It seemed better for all locks on a base table to have
     * the same containerid, rather than having some locks generated from
     * a btree have a containerid from base table and some having a containerid
     * from the btree.  If this turns out to be a problem we could either
     * have 2 different containerid's, be more creative with the record id, or
     * even add more to the lock key.
     *
     * @param current_leaf   Latched leaf; released (with aux_leaf) if the
     *                       routine has to wait on the lock.
     * @param aux_leaf       If non-null, this leaf is unlatched if the
     *                       routine has to wait on the lock.
     * @param lock_operation Whether to lock exclusive or share.
     * @param lock_duration  For what duration should the lock be held,
     *                       if INSTANT_DURATION, then the routine will
     *                       guarantee that lock was acquired while holding
     *                       the latch, but then immediately release the
     *                       lock.  If COMMIT_DURATION or MANUAL_DURATION
     *                       then the lock be held when routine returns
     *                       successfully.
     *
     * @return true if the lock was granted NOWAIT while latches were held,
     *         false if the latches had to be released to wait for the lock.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    private boolean lockPreviousToFirstKey(
    LeafControlRow          current_leaf,
    LeafControlRow          aux_leaf,
    int                     lock_operation,
    int                     lock_duration)
        throws StandardException
    {
        // This is first row in table, lock the special key that
        // represents the key previous to the first key of the table.

        // First try to get the lock NOWAIT, while latch is held.
        boolean ret_status =
            base_cc.lockRow(
                BTree.ROOTPAGEID,
                RecordHandle.PREVIOUS_KEY_HANDLE,
                lock_operation,
                false /* NOWAIT */,
                lock_duration);

        if (!ret_status)
        {
            current_leaf.release();
            current_leaf = null;

            if (aux_leaf != null)
            {
                aux_leaf.release();
                aux_leaf = null;
            }

            // Couldn't get the lock NOWAIT, release latch and wait for lock.
            base_cc.lockRow(
                BTree.ROOTPAGEID,
                RecordHandle.PREVIOUS_KEY_HANDLE,
                lock_operation,
                true /* WAIT */,
                lock_duration);
        }

        return(ret_status);
    }

    /**
     * Lock a btree row (row is at given slot in page).
     * <p>
     * Lock the row at the given slot in the page.  Meant to be used if caller
     * only has the slot on the page to be locked, and has not read the row
     * yet.  This routine fetches the row location field from the page, and
     * then locks that rowlocation in the base container.
     * <p>
     * Lock a btree row, enforcing the standard lock/latch protocol.
     * On return the row is locked.  Return status indicates if the lock
     * was waited for, which will mean a latch was dropped while waiting.
     * In general a false status means that the caller will either have
     * to research the tree unless some protocol has been implemented that
     * insures that the row will not have moved while the latch was dropped.
     * <p>
     * This routine request a row lock NOWAIT on the in-memory row
     * "current_row.".  If the lock is granted the routine will return true.
     * If the lock cannot be granted NOWAIT, then the routine will release
     * the latch on "current_leaf" and "aux_leaf" (if aux_leaf is non-null),
     * and then it will request a WAIT lock on the row.
     * <p>
     *
     * @param btree             The conglomerate we are locking.
     * @param current_leaf      Latched current leaf where "current" key is.
     * @param aux_leaf          If non-null, this leaf is unlatched if the
     *                          routine has to wait on the lock.
     * @param current_slot      Slot of row to lock.
     * @param check_changed_rowloc
     *                          whether to check for the changed rowloc or not.
     * @param lock_fetch_desc   Descriptor for fetching just the RowLocation,
     *                          used for locking.
     * @param lock_template     A scratch area to use to read in rows.
     * @param lock_row_loc      The RowLocation object within lock_template
     *                          that receives the fetched base-row location.
     * @param lock_operation    Whether lock is for key prev to insert or not.
     * @param lock_duration     For what duration should the lock be held,
     *                          if INSTANT_DURATION, then the routine will
     *                          guarantee that lock was acquired while holding
     *                          the latch, but then immediately release the
     *                          lock.  If COMMIT_DURATION or MANUAL_DURATION
     *                          then the lock be held when routine returns
     *                          successfully.
     *
     * @return true if the lock was granted NOWAIT with latches held, false
     *         if latches were released in order to wait for the lock.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    private boolean lockRowOnPage(
    BTree                   btree,
    LeafControlRow          current_leaf,
    LeafControlRow          aux_leaf,
    int                     current_slot,
    boolean                 check_changed_rowloc,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    int                     lock_operation,
    int                     lock_duration)
        throws StandardException
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(current_leaf != null);

            if (current_slot <= 0 ||
                current_slot >= current_leaf.getPage().recordCount())
            {
                SanityManager.THROWASSERT(
                    "current_slot = " + current_slot +
                    "; current_leaf.getPage().recordCount() = " +
                    current_leaf.getPage().recordCount());
            }

            if (!(btree instanceof B2I))
            {
                SanityManager.THROWASSERT(
                    "btree not instance of B2I, it is " +
                    btree.getClass().getName());
            }

            SanityManager.ASSERT(lock_template != null, "template is null");

            // For now the RowLocation is expected to be the object located in
            // the last column of the lock_template, this may change if we
            // ever support rows with RowLocations somewhere else.
            SanityManager.ASSERT(
                lock_row_loc == lock_template[lock_template.length - 1],
                "row_loc is not the object in last column of lock_template.");
        }

        // Fetch the row location to lock.
        RecordHandle rec_handle =
            current_leaf.getPage().fetchFromSlot(
                (RecordHandle) null, current_slot,
                lock_template, lock_fetch_desc, true);

        // First try to get the lock NOWAIT, while latch is held.
        boolean ret_status =
            base_cc.lockRow(
                lock_row_loc,
                lock_operation,
                false /* NOWAIT */,
                lock_duration);

        if (!ret_status)
        {
            // Could not get the lock NOWAIT, release latch and wait for lock.

            if (current_leaf != null)
            {
                current_leaf.release();
                current_leaf = null;
            }

            if (aux_leaf != null)
            {
                aux_leaf.release();
                aux_leaf = null;
            }

            base_cc.lockRow(
                lock_row_loc,
                lock_operation,
                true /* WAIT */,
                lock_duration);
        }

        return(ret_status);
    }

    /**
     * move left in btree and lock previous key.
     * <p>
     * Enter routine with "current_leaf" latched.  This routine implements
     * the left travel ladder locking protocol to search the leaf pages from
     * right to left for the previous key to 1st key on current_leaf.
     *
     * There are 2 cases:
     * 1) the previous page has keys, in which case the last key on that
     *    page is locked, otherwise search continues on the next page to
     *    the left.
     * 2) there are no keys on the current page and there is no page to the
     *    left.  In this case the special "leftmost key" lock is gotten by
     *    calling lockPreviousToFirstKey().
     *
     * Left ladder locking is used if all latches can be obtained immediately
     * with NOWAIT.  This means that current latch is held while asking for
     * left latch NOWAIT, and if left latch is granted then subsequently
     * current latch can be released.  If this protocol is followed and
     * all latches are granted then caller is guaranteed that the correct
     * previous key has been locked and current_page latch remains.  The
     * NOWAIT protocol is used to avoid latch/latch deadlocks.  The overall
     * protocol is that one never holds a latch while waiting on another unless
     * the direction of travel is down and to the right.
     * <p>
     * If along the search a latch has to be waited on then latches are
     * released and a wait is performed, and "false" status is returned to
     * caller.  In this case the routine can no longer be sure of its current
     * position and may have to retry the whole operation.
     *
     * @return true if previous key found without ever waiting on a latch,
     *         false if latch released in order to wait for other latch.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    private boolean searchLeftAndLockPreviousKey(
    B2I                     b2i,
    LeafControlRow          current_leaf,
    int                     current_slot,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    OpenBTree               open_btree,
    int                     lock_operation,
    int                     lock_duration)
        throws StandardException
    {
        boolean         latches_released = false;
        LeafControlRow  prev_leaf;
        LeafControlRow  prev_prev_leaf;

        try
        {
            // Move left in tree, page latch will be requested nowait,
            // and WaitError will be thrown if latch not granted.

            prev_leaf =
                (LeafControlRow) current_leaf.getLeftSibling(open_btree);
        }
        catch (WaitError e)
        {
            // initial latch request on leaf left of current could not be
            // granted NOWAIT.

            long previous_pageno = current_leaf.getleftSiblingPageNumber();

            current_leaf.release();
            current_leaf = null;

            // wait on the left leaf, which we could not be granted NOWAIT.
            prev_leaf =
                (LeafControlRow) ControlRow.get(open_btree, previous_pageno);

            latches_released = true;
        }

        while (true)
        {
            try
            {
                // loop searching left in the btree until you either find
                // a record to lock, or you reach the leftmost empty leaf.

                if (prev_leaf.getPage().recordCount() > 1)
                {
                    // lock the last row on the page, which is the previous
                    // record to the first row on the next page.

                    boolean ret_status =
                        lockRowOnPage(
                            b2i,
                            prev_leaf,
                            current_leaf,
                            prev_leaf.getPage().recordCount() - 1,
                            false,
                            lock_fetch_desc,
                            lock_template,
                            lock_row_loc,
                            lock_operation,
                            lock_duration);

                    if (!ret_status)
                    {
                        // needed to wait on a row lock, so both prev_leaf and
                        // current_leaf latches have been released by
                        // lockRowOnPage()
                        prev_leaf    = null;
                        current_leaf = null;

                        latches_released = true;
                    }

                    break;
                }
                else if (prev_leaf.isLeftmostLeaf())
                {
                    // Table's first row, lock the key that represents the
                    // key previous to first key of the table.
                    boolean ret_status =
                        lockPreviousToFirstKey(
                            prev_leaf, current_leaf,
                            lock_operation, lock_duration);

                    if (!ret_status)
                    {
                        // needed to wait on a row lock, so both prev_leaf and
                        // current_leaf latches have been released by
                        // lockPreviousToFirstKey()
                        prev_leaf    = null;
                        current_leaf = null;

                        latches_released = true;
                    }

                    break;
                }

                // Move left in tree, page latch will be requested nowait,
                // and WaitError will be thrown if latch not granted.
                // Release latches on pages between "current_leaf" and
                // where the search leads, so that at most 3 latched pages
                // (current_leaf, prev_leaf, prev_prev_leaf) are held during
                // the search.  Do left ladder locking as you walk left.

                prev_prev_leaf =
                    (LeafControlRow) prev_leaf.getLeftSibling(open_btree);
                prev_leaf.release();
                prev_leaf = prev_prev_leaf;
                prev_prev_leaf = null;
            }
            catch (WaitError e)
            {
                long previous_pageno = prev_leaf.getleftSiblingPageNumber();

                // error going left.  Release current page latch and
                // original page latch continue the search.

                if (current_leaf != null)
                {
                    // current_leaf may have already been released as part of
                    // previous calls, need to check null status.
                    current_leaf.release();
                    current_leaf = null;
                }

                // can only get here by above getLeftSibling() call so prev_leaf
                // should always be valid and latched at this point.  No null
                // check necessary.
                prev_leaf.release();
                prev_leaf = null;

                // wait on the left page, which we could not get before.
                prev_leaf =
                    (LeafControlRow) ControlRow.get(open_btree, previous_pageno);

                latches_released = true;
            }
        }

        if (prev_leaf != null)
            prev_leaf.release();

        return(!latches_released);
    }

    /**************************************************************************
     * Protected methods of This class:
     **************************************************************************
     */

    /**
     * Lock a row as part of doing the scan.
     * <p>
     * Lock the row at the given slot (or the previous row if slot is 0).
     * Get the scan lock on the page if "request_scan_lock" is true.
     * <p>
     * If this routine returns true all locks were acquired while maintaining
     * the latch on leaf.  If this routine returns false, locks may or may
     * not have been acquired, and the routine should be called again after
     * the client has researched the tree to reget the latch on the
     * appropriate page.
     * <p>
     * As a side effect stores the value of the record handle of the current
     * scan lock.
     *
     * @return Whether locks were acquired without releasing latch on leaf.
     *
     * @param open_btree        The open_btree to associate latches with -
     *                          used if routine has to scan backward.
     * @param btree             the conglomerate info.
     * @param pos               The position of the row to lock.
     * @param request_row_lock  Whether to request the row lock, should
     *                          only be requested once per page in the scan.
     * @param request_scan_lock Whether to request the page scan lock, should
     *                          only be requested once per page in the scan.
     * @param lock_fetch_desc   The fetch descriptor to use to fetch the
     *                          row location for the lock request.
     * @param lock_template     A scratch area to use to read in rows.
     * @param lock_row_loc      The RowLocation object within lock_template
     *                          that receives the fetched base-row location.
     * @param previous_key_lock Is this a previous key lock call?
     * @param forUpdate         Is the scan for update or for read only.
     * @param lock_operation    The lock mode to request on the row.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    protected boolean _lockScanRow(
    OpenBTree               open_btree,
    BTree                   btree,
    BTreeRowPosition        pos,
    boolean                 request_row_lock,
    boolean                 request_scan_lock,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    boolean                 previous_key_lock,
    boolean                 forUpdate,
    int                     lock_operation)
        throws StandardException
    {
        boolean latch_released = false;
        B2I     b2i            = (B2I) btree;

        if (request_row_lock)
        {
            // In order to implement a serialized scan based on previous
            // key locking, this method acquires a row lock on
            // the base table's row from the index row at [startpage/startslot].
            // This will be the 'previous key'.

            if (pos.current_slot == 0)
            {
                // this call will take care of searching left in the btree
                // to find the previous row to lock, 0 is the control row and
                // not a valid thing to lock as a previous key.

                // it is ok to call the non-scan as this is just a special
                // case of a previous key lock call.  The only scan code that
                // will call this routine with slot == 0 will retry if this
                // routine returns that a latch was released.

                latch_released =
                    !lockNonScanPreviousRow(
                        btree,
                        pos.current_leaf,
                        1 /* lock row previous to row at slot 1 */,
                        lock_fetch_desc,
                        lock_template,
                        lock_row_loc,
                        open_btree,
                        lock_operation,
                        TransactionManager.LOCK_COMMIT_DURATION);

                // special test to see if latch release code works
                if (SanityManager.DEBUG)
                {
                    latch_released =
                        OpenBTree.test_errors(
                            open_btree,
                            "B2iRowLocking3_1_lockScanRow", false,
                            this, pos.current_leaf, latch_released);
                }
            }
            else
            {
                // Just lock the row at "slot"

                latch_released =
                    !lockRowOnPage(
                        btree,
                        pos.current_leaf,
                        (LeafControlRow) null /* no other latch currently */,
                        pos.current_slot,
                        true,
                        lock_fetch_desc,
                        lock_template,
                        lock_row_loc,
                        lock_operation,
                        TransactionManager.LOCK_COMMIT_DURATION);

                // special test to see if latch release code works
                if (SanityManager.DEBUG)
                {
                    latch_released =
                        OpenBTree.test_errors(
                            open_btree,
                            "B2iRowLocking3_2_lockScanRow", false,
                            this, pos.current_leaf, latch_released);
                }
            }
        }

        if (request_scan_lock && !latch_released)
        {
            // Get the scan lock on the start page.

            // Get shared RECORD_ID_PROTECTION_HANDLE lock to make sure that
            // we wait for scans in other transactions to move off of this page
            // before we split.

            latch_released =
                !lockScan(
                    pos.current_leaf,
                    (LeafControlRow) null, // no other latch currently
                    false,
                    ConglomerateController.LOCK_READ); // read scan lock position

            // special test to see if latch release code works
            if (SanityManager.DEBUG)
            {
                /* RESOLVE - need to get a container here */
                latch_released =
                    OpenBTree.test_errors(
                        open_btree,
                        "B2iRowLocking3_3_lockScanRow", true,
                        this, pos.current_leaf, latch_released);
            }
        }

        return(!latch_released);
    }

    /**************************************************************************
     * Public Methods of This class:
     **************************************************************************
     */

    /**************************************************************************
     * Abstract Protected lockScan*() locking methods of BTree:
     *     lockScan                 - lock the scan page
     *     lockScanForReclaimSpace  - lock page for reclaiming deleted rows.
     *     lockScanRow              - lock row and possibly the scan page
     *     unlockScan               - unlock the scan page
     *     unlockScanRecordAfterRead- unlock the scan record
     **************************************************************************
     */

    /**
     * Lock a control row page for scan.
     * <p>
     * Scanners get shared lock on the page while positioned on a row within
     * the page, splitter/purgers/mergers get exclusive lock on the page.
     *
     * See BTree.lockScan() for more info.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockScan(
    LeafControlRow          current_leaf,
    ControlRow              aux_control_row,
    boolean                 forUpdate,
    int                     lock_operation)
        throws StandardException
    {
        // The scan page lock is implemented as a row lock on the reserved
        // row id on the page (RecordHandle.RECORD_ID_PROTECTION_HANDLE).
        RecordHandle scan_lock_rh =
            current_leaf.getPage().getProtectionRecordHandle();

        // First try to get the lock NOWAIT, while latch is held.
        boolean ret_status =
            _lockScan(scan_lock_rh, forUpdate, false /* NOWAIT */);

        if (!ret_status)
        {
            current_leaf.release();
            current_leaf = null;

            if (aux_control_row != null)
            {
                aux_control_row.release();
                aux_control_row = null;
            }

            // Could not get the lock NOWAIT, release latch and wait
            // for the lock.
            _lockScan(scan_lock_rh, forUpdate, true /* WAIT */);

            // once we get the lock, give it up as we need to get the lock
            // while we have the latch.  When the lock manager gives us the
            // ability to do instantaneous locks do that.  We just wait on the
            // lock to give the split a chance to finish before we interfere.

            if (!forUpdate)
            {
                scan_locking_policy.unlockRecordAfterRead(
                    rawtran, open_btree.getContainerHandle(),
                    scan_lock_rh, false, true);
            }
            else
            {
                // RESOLVE - need instantaneous locks as there is no way
                // currently to release a write lock.  This lock will only
                // be requested by split, and will be released by internal
                // transaction.
            }
        }

        return(ret_status);
    }

    /**
     * Lock a control row page for reclaiming deleted rows.
     * <p>
     * When reclaiming deleted rows during split need to get an exclusive
     * scan lock on the page, which will mean there are no other scans
     * positioned on the page.  If there are other scans positioned, just
     * give up on reclaiming space now.
     *
     * @return true if lock was granted nowait, else false and no lock was
     *         granted.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockScanForReclaimSpace(
    LeafControlRow          current_leaf)
        throws StandardException
    {
        // The scan page lock is implemented as a row lock on the reserved
        // row id on the page (RecordHandle.RECORD_ID_PROTECTION_HANDLE).
        RecordHandle scan_lock_rh =
            current_leaf.getPage().getProtectionRecordHandle();

        // First try to get the lock NOWAIT, while latch is held.
        return(_lockScan(scan_lock_rh, true /* update */, false /* NOWAIT */));
    }

    /**
     * Lock a btree row to determine if it is a committed deleted row.
     * <p>
     * @see BTreeLockingPolicy#lockScanCommittedDeletedRow
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockScanCommittedDeletedRow(
    OpenBTree               open_btree,
    LeafControlRow          leaf,
    DataValueDescriptor[]   template,
    FetchDescriptor         lock_fetch_desc,
    int                     slot_no)
        throws StandardException
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(leaf != null);

            if (slot_no <= 0 || slot_no >= leaf.getPage().recordCount())
            {
                SanityManager.THROWASSERT(
                    "slot_no = " + slot_no +
                    "; leaf.getPage().recordCount() = " +
                    leaf.getPage().recordCount());
            }

            SanityManager.ASSERT(template != null, "template is null");
        }

        RowLocation row_loc = (RowLocation)
            template[((B2I) open_btree.getConglomerate()).rowLocationColumn];

        // Fetch the row location to lock.
        leaf.getPage().fetchFromSlot(
            (RecordHandle) null, slot_no, template, lock_fetch_desc, true);

        // Request the lock NOWAIT, return status
        return(base_cc.lockRow(
                row_loc,
                ConglomerateController.LOCK_UPD,
                false /* NOWAIT */,
                TransactionManager.LOCK_COMMIT_DURATION));
    }

    /**
     * Lock a row as part of doing the scan.
     * <p>
     * Lock the row at the given slot (or the previous row if slot is 0).
     * Get the scan lock on the page if "request_scan_lock" is true.
     * <p>
     * If this routine returns true all locks were acquired while maintaining
     * the latch on leaf.  If this routine returns false, locks may or may
     * not have been acquired, and the routine should be called again after
     * the client has researched the tree to reget the latch on the
     * appropriate page.
     * <p>
     * As a side effect stores the value of the record handle of the current
     * scan lock.
     *
     * @return Whether locks were acquired without releasing latch on leaf.
     *
     * @param open_btree        The open_btree to associate latches with -
     *                          used if routine has to scan backward.
     * @param btree             the conglomerate info.
     * @param pos               The position of the row to lock.
     * @param request_scan_lock Whether to request the page scan lock, should
     *                          only be requested once per page in the scan.
     * @param lock_fetch_desc   The fetch descriptor to use to fetch the
     *                          row location for the lock request.
     * @param lock_template     A scratch area to use to read in rows.
     * @param lock_row_loc      The RowLocation object within lock_template
     *                          that receives the fetched base-row location.
     * @param previous_key_lock Is this a previous key lock call?
     * @param forUpdate         Is the scan for update or for read only.
     * @param lock_operation    The lock mode to request on the row.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockScanRow(
    OpenBTree               open_btree,
    BTree                   btree,
    BTreeRowPosition        pos,
    boolean                 request_scan_lock,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    boolean                 previous_key_lock,
    boolean                 forUpdate,
    int                     lock_operation)
        throws StandardException
    {
        return(
            _lockScanRow(
                open_btree,
                btree,
                pos,
                true,  // request the row lock (always true for iso 3 )
                request_scan_lock,
                lock_fetch_desc, lock_template, lock_row_loc,
                previous_key_lock,
                forUpdate,
                lock_operation));
    }

    /**
     * Release read lock on a row.
     *
     * For serializable, there is no work to do.
     *
     **/
    public void unlockScanRecordAfterRead(
    BTreeRowPosition        pos,
    boolean                 forUpdate)
        throws StandardException
    {
        return;
    }

    /**
     * Release the lock gotten by calling lockScan.  This call can only be
     * made to release read scan locks, write scan locks must be held until
     * end of transaction.
     * <p>
     * See BTree.unlockScan() for more info.
     *
     **/
    public void unlockScan(RecordHandle scan_lock_rh)
    {
        // Release the shared scan-position lock (taken in lockScan) on the
        // page's protection record handle.
        try
        {
            scan_locking_policy.unlockRecordAfterRead(
                rawtran, open_btree.getContainerHandle(),
                scan_lock_rh, false, true);
        }
        catch (StandardException se)
        {
            if (SanityManager.DEBUG)
                SanityManager.THROWASSERT(se);
        }
    }

    /**************************************************************************
     * Abstract Protected lockNonScan*() locking methods of BTree:
     *
     *     lockNonScanPreviousRow   - lock the row previous to the current
     *     lockNonScanRow           - lock the input row
     **************************************************************************
     */

    /**
     * Lock the row previous to the input row.
     * <p>
     * See BTreeLockingPolicy.lockNonScanPreviousRow
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockNonScanPreviousRow(
    BTree                   btree,
    LeafControlRow          current_leaf,
    int                     current_slot,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    OpenBTree               open_btree,
    int                     lock_operation,
    int                     lock_duration)
        throws StandardException
    {
        boolean ret_status;

        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(btree instanceof B2I);
        }

        if (current_slot > 1)
        {
            // Easy case, just lock the key previous to the current one.

            // Lock (current_slot - 1)

            ret_status =
                lockRowOnPage(
                    btree,
                    current_leaf,
                    (LeafControlRow) null,
                    current_slot - 1,
                    false,
                    lock_fetch_desc,
                    lock_template,
                    lock_row_loc,
                    lock_operation,
                    lock_duration);
        }
        else
        {
            // Should only be called while pointing at a valid location, 0
            // is not a valid key slot - it is the control row.
            if (SanityManager.DEBUG)
                SanityManager.ASSERT(current_slot == 1);

            if (current_leaf.isLeftmostLeaf())
            {
                // This is first row in table, lock the special key that
                // represents the key previous to the first key of the table.
                ret_status =
                    lockPreviousToFirstKey(
                        current_leaf,
                        (LeafControlRow) null,
                        lock_operation,
                        lock_duration);
            }
            else
            {
                // The previous key is on a previous page, search left
                // through the pages to find the key to latch.

                // RESOLVE RLL (mikem) - do I need to do the
                // RECORD_ID_PROTECTION_HANDLE lock.
                // First guarantee that record id's will not move off this
                // current page while searching for previous key, by getting
                // the RECORD_ID_PROTECTION_HANDLE lock on the current page.
                // Since we have a latch on the cur

                // RESOLVE RLL (mikem) - NO RECORD_ID PROTECTION IN EFFECT.
                // caller must research, get new locks if this routine
                // releases latches.
                ret_status = this.searchLeftAndLockPreviousKey(
                    (B2I) btree,
                    current_leaf,
                    current_slot,
                    lock_fetch_desc,
                    lock_template,
                    lock_row_loc,
                    open_btree,
                    lock_operation,
                    lock_duration);
            }
        }

        return(ret_status);
    }

    /**
     * Lock the in memory row.
     * <p>
     * See BTree.lockRow() for more info.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockNonScanRow(
    BTree                   btree,
    LeafControlRow          current_leaf,
    LeafControlRow          aux_leaf,
    DataValueDescriptor[]   current_row,
    int                     lock_operation)
        throws StandardException
    {
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(btree instanceof B2I);
        }

        B2I b2i = (B2I) btree;

        // First try to get the lock NOWAIT, while latch is held.
        boolean ret_status =
            base_cc.lockRow(
                (RowLocation) current_row[b2i.rowLocationColumn],
                lock_operation,
                false /* NOWAIT */,
                TransactionManager.LOCK_COMMIT_DURATION);

        if (!ret_status)
        {
            // Could not get the lock NOWAIT, release latch and wait for lock.

            if (current_leaf != null)
            {
                current_leaf.release();
                current_leaf = null;
            }

            if (aux_leaf != null)
            {
                aux_leaf.release();
                aux_leaf = null;
            }

            base_cc.lockRow(
                (RowLocation) current_row[b2i.rowLocationColumn],
                lock_operation,
                true /* WAIT */,
                TransactionManager.LOCK_COMMIT_DURATION);
        }

        return(ret_status);
    }

    /**
     * Lock the row at the given slot, holding the lock for commit duration.
     * Thin wrapper over lockRowOnPage() with no auxiliary latch.
     *
     * @exception  StandardException  Standard exception policy.
     **/
    public boolean lockNonScanRowOnPage(
    BTree                   btree,
    LeafControlRow          current_leaf,
    int                     current_slot,
    FetchDescriptor         lock_fetch_desc,
    DataValueDescriptor[]   lock_template,
    RowLocation             lock_row_loc,
    int                     lock_operation)
        throws StandardException
    {
        return(
            lockRowOnPage(
                btree,
                current_leaf,
                null,
                current_slot,
                false,
                lock_fetch_desc,
                lock_template,
                lock_row_loc,
                lock_operation,
                TransactionManager.LOCK_COMMIT_DURATION));
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.sqoop.lib;

import java.math.BigDecimal;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Arrays;

import org.apache.hadoop.io.BytesWritable;

import com.cloudera.sqoop.lib.BlobRef;
import com.cloudera.sqoop.lib.ClobRef;

/**
 * Contains a set of methods which can read db columns from a ResultSet into
 * Java types, and do serialization of these types to/from DataInput/DataOutput
 * for use with Hadoop's Writable implementation. This supports null values
 * for all types.
 *
 * <p>Read methods map SQL NULL to Java {@code null}; write methods map Java
 * {@code null} to {@code PreparedStatement.setNull(idx, sqlType)}.</p>
 */
public final class JdbcWritableBridge {

  // Currently, cap BLOB/CLOB objects at 16 MB until we can use external
  // storage.
  public static final long MAX_BLOB_LENGTH = 16 * 1024 * 1024;
  public static final long MAX_CLOB_LENGTH = 16 * 1024 * 1024;

  /** Utility class; not instantiable. */
  private JdbcWritableBridge() {
  }

  /**
   * Reads an int column; returns null if the column value was SQL NULL.
   * ResultSet.getInt() returns 0 for NULL, so wasNull() must be consulted
   * before boxing.
   */
  public static Integer readInteger(int colNum, ResultSet r)
      throws SQLException {
    int val = r.getInt(colNum);
    return r.wasNull() ? null : Integer.valueOf(val);
  }

  /** Reads a long column; returns null if the column value was SQL NULL. */
  public static Long readLong(int colNum, ResultSet r) throws SQLException {
    long val = r.getLong(colNum);
    return r.wasNull() ? null : Long.valueOf(val);
  }

  /** Reads a string column; getString() already returns null for SQL NULL. */
  public static String readString(int colNum, ResultSet r)
      throws SQLException {
    return r.getString(colNum);
  }

  /** Reads a float column; returns null if the column value was SQL NULL. */
  public static Float readFloat(int colNum, ResultSet r) throws SQLException {
    float val = r.getFloat(colNum);
    return r.wasNull() ? null : Float.valueOf(val);
  }

  /** Reads a double column; returns null if the column value was SQL NULL. */
  public static Double readDouble(int colNum, ResultSet r)
      throws SQLException {
    double val = r.getDouble(colNum);
    return r.wasNull() ? null : Double.valueOf(val);
  }

  /** Reads a boolean column; returns null if the column value was SQL NULL. */
  public static Boolean readBoolean(int colNum, ResultSet r)
      throws SQLException {
    boolean val = r.getBoolean(colNum);
    return r.wasNull() ? null : Boolean.valueOf(val);
  }

  /** Reads a TIME column; getTime() already returns null for SQL NULL. */
  public static Time readTime(int colNum, ResultSet r) throws SQLException {
    return r.getTime(colNum);
  }

  /** Reads a TIMESTAMP column; returns null for SQL NULL. */
  public static Timestamp readTimestamp(int colNum, ResultSet r)
      throws SQLException {
    return r.getTimestamp(colNum);
  }

  /** Reads a DATE column; returns null for SQL NULL. */
  public static Date readDate(int colNum, ResultSet r) throws SQLException {
    return r.getDate(colNum);
  }

  /**
   * Reads a binary column into a BytesWritable; returns null for SQL NULL.
   */
  public static BytesWritable readBytesWritable(int colNum, ResultSet r)
      throws SQLException {
    byte [] bytes = r.getBytes(colNum);
    return bytes == null ? null : new BytesWritable(bytes);
  }

  /** Reads a DECIMAL/NUMERIC column; returns null for SQL NULL. */
  public static BigDecimal readBigDecimal(int colNum, ResultSet r)
      throws SQLException {
    return r.getBigDecimal(colNum);
  }

  /**
   * Always returns null; loading of BLOBs is delayed and handled by
   * LargeObjectLoader.
   */
  public static BlobRef readBlobRef(int colNum, ResultSet r)
      throws SQLException {
    // Loading of BLOBs is delayed; handled by LargeObjectLoader.
    return null;
  }

  /**
   * Always returns null; loading of CLOBs is delayed and handled by
   * LargeObjectLoader.
   */
  public static ClobRef readClobRef(int colNum, ResultSet r)
      throws SQLException {
    // Loading of CLOBs is delayed; handled by LargeObjectLoader.
    return null;
  }

  /** Binds an Integer parameter, using setNull() for null values. */
  public static void writeInteger(Integer val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setInt(paramIdx, val);
    }
  }

  /** Binds a Long parameter, using setNull() for null values. */
  public static void writeLong(Long val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setLong(paramIdx, val);
    }
  }

  /** Binds a Double parameter, using setNull() for null values. */
  public static void writeDouble(Double val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setDouble(paramIdx, val);
    }
  }

  /** Binds a Boolean parameter, using setNull() for null values. */
  public static void writeBoolean(Boolean val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setBoolean(paramIdx, val);
    }
  }

  /** Binds a Float parameter, using setNull() for null values. */
  public static void writeFloat(Float val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setFloat(paramIdx, val);
    }
  }

  /** Binds a String parameter, using setNull() for null values. */
  public static void writeString(String val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setString(paramIdx, val);
    }
  }

  /** Binds a Timestamp parameter, using setNull() for null values. */
  public static void writeTimestamp(Timestamp val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setTimestamp(paramIdx, val);
    }
  }

  /** Binds a Time parameter, using setNull() for null values. */
  public static void writeTime(Time val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setTime(paramIdx, val);
    }
  }

  /** Binds a Date parameter, using setNull() for null values. */
  public static void writeDate(Date val, int paramIdx, int sqlType,
      PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setDate(paramIdx, val);
    }
  }

  /**
   * Binds a BytesWritable parameter, using setNull() for null values.
   * Only the first getLength() bytes of the backing array are valid, so the
   * payload is truncated to that length before binding.
   */
  public static void writeBytesWritable(BytesWritable val, int paramIdx,
      int sqlType, PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      // val.getBytes() is only valid in [0, len); copy out just that range.
      s.setBytes(paramIdx, Arrays.copyOf(val.getBytes(), val.getLength()));
    }
  }

  /** Binds a BigDecimal parameter, using setNull() for null values. */
  public static void writeBigDecimal(BigDecimal val, int paramIdx,
      int sqlType, PreparedStatement s) throws SQLException {
    if (null == val) {
      s.setNull(paramIdx, sqlType);
    } else {
      s.setBigDecimal(paramIdx, val);
    }
  }

  /**
   * Unsupported: BLOB export is not implemented; always throws.
   * @throws RuntimeException always.
   */
  public static void writeBlobRef(com.cloudera.sqoop.lib.BlobRef val,
      int paramIdx, int sqlType, PreparedStatement s) throws SQLException {
    // TODO: support this.
    throw new RuntimeException("Unsupported: Cannot export BLOB data");
  }

  /**
   * Unsupported: CLOB export is not implemented; always throws.
   * @throws RuntimeException always.
   */
  public static void writeClobRef(com.cloudera.sqoop.lib.ClobRef val,
      int paramIdx, int sqlType, PreparedStatement s) throws SQLException {
    // TODO: support this.
    throw new RuntimeException("Unsupported: Cannot export CLOB data");
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.WildcardQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.support.QueryParsers;

import java.io.IOException;
import java.util.Objects;
import java.util.Optional;

/**
 * Implements the wildcard search query. Supported wildcards are <tt>*</tt>, which
 * matches any character sequence (including the empty one), and <tt>?</tt>,
 * which matches any single character. Note this query can be slow, as it
 * needs to iterate over many terms. In order to prevent extremely slow WildcardQueries,
 * a Wildcard term should not start with one of the wildcards <tt>*</tt> or
 * <tt>?</tt>.
 */
public class WildcardQueryBuilder extends AbstractQueryBuilder<WildcardQueryBuilder> implements MultiTermQueryBuilder {

    public static final String NAME = "wildcard";

    // Both "wildcard" and "value" are accepted as the key for the pattern when
    // parsing; "wildcard" is what doXContent emits.
    private static final ParseField WILDCARD_FIELD = new ParseField("wildcard");
    private static final ParseField VALUE_FIELD = new ParseField("value");
    private static final ParseField REWRITE_FIELD = new ParseField("rewrite");

    // Target field name; immutable, validated non-empty in the constructor.
    private final String fieldName;
    // Wildcard pattern; immutable, validated non-null in the constructor.
    private final String value;
    // Optional multi-term rewrite method name; null means the default rewrite.
    private String rewrite;

    /**
     * Implements the wildcard search query. Supported wildcards are <tt>*</tt>, which
     * matches any character sequence (including the empty one), and <tt>?</tt>,
     * which matches any single character. Note this query can be slow, as it
     * needs to iterate over many terms. In order to prevent extremely slow WildcardQueries,
     * a Wildcard term should not start with one of the wildcards <tt>*</tt> or
     * <tt>?</tt>.
     *
     * @param fieldName The field name
     * @param value The wildcard query string
     */
    public WildcardQueryBuilder(String fieldName, String value) {
        if (Strings.isEmpty(fieldName)) {
            throw new IllegalArgumentException("field name is null or empty");
        }
        if (value == null) {
            throw new IllegalArgumentException("value cannot be null.");
        }
        this.fieldName = fieldName;
        this.value = value;
    }

    /**
     * Read from a stream.
     * NOTE: the read order (fieldName, value, rewrite) must mirror doWriteTo
     * exactly — this is the wire format exchanged between nodes.
     */
    public WildcardQueryBuilder(StreamInput in) throws IOException {
        super(in);
        fieldName = in.readString();
        value = in.readString();
        rewrite = in.readOptionalString();
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // Must mirror the StreamInput constructor's read order.
        out.writeString(fieldName);
        out.writeString(value);
        out.writeOptionalString(rewrite);
    }

    /** Returns the name of the field this query targets. */
    public String fieldName() {
        return fieldName;
    }

    /** Returns the wildcard pattern. */
    public String value() {
        return value;
    }

    /** Sets the multi-term rewrite method; returns this for chaining. */
    public WildcardQueryBuilder rewrite(String rewrite) {
        this.rewrite = rewrite;
        return this;
    }

    /** Returns the configured rewrite method name, or null for the default. */
    public String rewrite() {
        return this.rewrite;
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        // Emits: { "wildcard": { "<field>": { "wildcard": <value>, ... } } }
        builder.startObject(NAME);
        builder.startObject(fieldName);
        builder.field(WILDCARD_FIELD.getPreferredName(), value);
        if (rewrite != null) {
            builder.field(REWRITE_FIELD.getPreferredName(), rewrite);
        }
        printBoostAndQueryName(builder);
        builder.endObject();
        builder.endObject();
    }

    /**
     * Parses the query body. Accepts both the full object form
     * { "<field>": { "wildcard"|"value": ..., "boost": ..., "rewrite": ...,
     * "_name": ... } } and the short form { "<field>": "<pattern>" }.
     * Token-order sensitive: the leading nextToken() calls consume the field
     * name and the trailing nextToken() consumes the closing token.
     */
    public static Optional<WildcardQueryBuilder> fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new ParsingException(parser.getTokenLocation(), "[wildcard] query malformed, no field");
        }
        String fieldName = parser.currentName();
        String rewrite = null;
        String value = null;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String queryName = null;
        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else {
                    if (parseContext.getParseFieldMatcher().match(currentFieldName, WILDCARD_FIELD)) {
                        value = parser.text();
                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
                        value = parser.text();
                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                        boost = parser.floatValue();
                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, REWRITE_FIELD)) {
                        rewrite = parser.textOrNull();
                    } else if (parseContext.getParseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                        queryName = parser.text();
                    } else {
                        throw new ParsingException(parser.getTokenLocation(),
                                "[wildcard] query does not support [" + currentFieldName + "]");
                    }
                }
            }
            parser.nextToken();
        } else {
            // Short form: the current token is the pattern itself.
            value = parser.text();
            parser.nextToken();
        }
        if (value == null) {
            throw new ParsingException(parser.getTokenLocation(), "No value specified for wildcard query");
        }
        return Optional.of(new WildcardQueryBuilder(fieldName, value)
                .rewrite(rewrite)
                .boost(boost)
                .queryName(queryName));
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        MappedFieldType fieldType = context.fieldMapper(fieldName);
        Term term;
        if (fieldType == null) {
            // Unmapped field: build the term directly from the raw value.
            term = new Term(fieldName, BytesRefs.toBytesRef(value));
        } else {
            // Mapped field: let the field type encode the value, then extract
            // the resulting term so index-specific encodings are honored.
            Query termQuery = fieldType.termQuery(value, context);
            term = MappedFieldType.extractTerm(termQuery);
        }
        WildcardQuery query = new WildcardQuery(term);
        MultiTermQuery.RewriteMethod rewriteMethod = QueryParsers.parseRewriteMethod(context.getParseFieldMatcher(), rewrite, null);
        QueryParsers.setRewriteMethod(query, rewriteMethod);
        return query;
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(fieldName, value, rewrite);
    }

    @Override
    protected boolean doEquals(WildcardQueryBuilder other) {
        return Objects.equals(fieldName, other.fieldName)
                && Objects.equals(value, other.value)
                && Objects.equals(rewrite, other.rewrite);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.tests.java.util; import tests.support.Support_ListTest; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import java.util.Vector; public class VectorTest extends junit.framework.TestCase { private Vector tVector = new Vector(); Object[] objArray; private String vString = "[Test 0, Test 1, Test 2, Test 3, Test 4, Test 5, Test 6, Test 7, Test 8, Test 9, Test 10, Test 11, Test 12, Test 13, Test 14, Test 15, Test 16, Test 17, Test 18, Test 19, Test 20, Test 21, Test 22, Test 23, Test 24, Test 25, Test 26, Test 27, Test 28, Test 29, Test 30, Test 31, Test 32, Test 33, Test 34, Test 35, Test 36, Test 37, Test 38, Test 39, Test 40, Test 41, Test 42, Test 43, Test 44, Test 45, Test 46, Test 47, Test 48, Test 49, Test 50, Test 51, Test 52, Test 53, Test 54, Test 55, Test 56, Test 57, Test 58, Test 59, Test 60, Test 61, Test 62, Test 63, Test 64, Test 65, Test 66, Test 67, Test 68, Test 69, Test 70, Test 71, Test 72, Test 73, Test 74, Test 75, Test 76, Test 77, Test 78, Test 79, Test 80, Test 81, Test 82, Test 83, Test 84, Test 85, 
Test 86, Test 87, Test 88, Test 89, Test 90, Test 91, Test 92, Test 93, Test 94, Test 95, Test 96, Test 97, Test 98, Test 99]"; /** * java.util.Vector#Vector() */ public void test_Constructor() { // Test for method java.util.Vector() Vector tv = new Vector(100); for (int i = 0; i < 100; i++) tv.addElement(new Integer(i)); new Support_ListTest("", tv).runTest(); tv = new Vector(200); for (int i = -50; i < 150; i++) tv.addElement(new Integer(i)); new Support_ListTest("", tv.subList(50, 150)).runTest(); Vector v = new Vector(); assertEquals("Vector creation failed", 0, v.size()); assertEquals("Wrong capacity", 10, v.capacity()); } /** * java.util.Vector#Vector(int) */ public void test_ConstructorI() { // Test for method java.util.Vector(int) Vector v = new Vector(100); assertEquals("Vector creation failed", 0, v.size()); assertEquals("Wrong capacity", 100, v.capacity()); try { new Vector(-1); fail("IllegalArgumentException expected"); } catch (IllegalArgumentException e) { //expected } } /** * java.util.Vector#Vector(int, int) */ public void test_ConstructorII() { // Test for method java.util.Vector(int, int) Vector v = new Vector(2, 10); v.addElement(new Object()); v.addElement(new Object()); v.addElement(new Object()); assertEquals("Failed to inc capacity by proper amount", 12, v.capacity()); Vector grow = new Vector(3, -1); grow.addElement("one"); grow.addElement("two"); grow.addElement("three"); grow.addElement("four"); assertEquals("Wrong size", 4, grow.size()); assertEquals("Wrong capacity", 6, grow.capacity()); Vector emptyVector = new Vector(0, 0); emptyVector.addElement("one"); assertEquals("Wrong size", 1, emptyVector.size()); emptyVector.addElement("two"); emptyVector.addElement("three"); assertEquals("Wrong size", 3, emptyVector.size()); try { Vector negativeVector = new Vector(-1, 0); fail("Should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // Excepted } } /** * java.util.Vector#Vector(java.util.Collection) */ public void 
test_ConstructorLjava_util_Collection() { // Test for method java.util.Vector(java.util.Collection) Collection l = new LinkedList(); for (int i = 0; i < 100; i++) l.add("Test " + i); Vector myVector = new Vector(l); assertTrue("Vector is not correct size", myVector.size() == objArray.length); for (int counter = 0; counter < objArray.length; counter++) assertTrue("Vector does not contain correct elements", myVector .contains(((List) l).get(counter))); try { new Vector(null); fail("NullPointerException expected"); } catch (NullPointerException e) { //expected } } /** * java.util.Vector#add(int, java.lang.Object) */ public void test_addILjava_lang_Object() { // Test for method void java.util.Vector.add(int, java.lang.Object) Object o = new Object(); Object prev = tVector.get(45); tVector.add(45, o); assertTrue("Failed to add Object", tVector.get(45) == o); assertTrue("Failed to fix-up existing indices", tVector.get(46) == prev); assertEquals("Wrong size after add", 101, tVector.size()); prev = tVector.get(50); tVector.add(50, null); assertNull("Failed to add null", tVector.get(50)); assertTrue("Failed to fix-up existing indices after adding null", tVector.get(51) == prev); assertEquals("Wrong size after add", 102, tVector.size()); try { tVector.add(-5, null); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } try { tVector.add(tVector.size() + 1, null); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } } /** * java.util.Vector#add(java.lang.Object) */ public void test_addLjava_lang_Object() { // Test for method boolean java.util.Vector.add(java.lang.Object) Object o = new Object(); tVector.add(o); assertTrue("Failed to add Object", tVector.lastElement() == o); assertEquals("Wrong size after add", 101, tVector.size()); tVector.add(null); assertNull("Failed to add null", tVector.lastElement()); assertEquals("Wrong size after add", 102, tVector.size()); } /** * 
java.util.Vector#addAll(int, java.util.Collection) */ public void test_addAllILjava_util_Collection() { // Test for method boolean java.util.Vector.addAll(int, // java.util.Collection) Collection l = new LinkedList(); for (int i = 0; i < 100; i++) l.add("Test " + i); Vector v = new Vector(); tVector.addAll(50, l); for (int i = 50; i < 100; i++) assertTrue("Failed to add all elements", tVector.get(i) == ((List) l).get(i - 50)); v = new Vector(); v.add("one"); int r = 0; try { v.addAll(3, Arrays.asList(new String[] { "two", "three" })); } catch (ArrayIndexOutOfBoundsException e) { r = 1; } catch (IndexOutOfBoundsException e) { r = 2; } assertTrue("Invalid add: " + r, r == 1); l = new LinkedList(); l.add(null); l.add("gah"); l.add(null); tVector.addAll(50, l); assertNull("Wrong element at position 50--wanted null", tVector.get(50)); assertEquals("Wrong element at position 51--wanted 'gah'", "gah", tVector .get(51)); assertNull("Wrong element at position 52--wanted null", tVector.get(52)); try { tVector.addAll(-5, Arrays.asList(new String[] { "two", "three" })); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } try { tVector.addAll(tVector.size() + 1, Arrays.asList(new String[] { "two", "three" })); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } try { tVector.addAll(tVector.size() / 2, null); fail("NullPointerException expected"); } catch(NullPointerException e) { //expected } } /** * java.util.Vector#addAll(java.util.Collection) */ public void test_addAllLjava_util_Collection() { // Test for method boolean java.util.Vector.addAll(java.util.Collection) Vector v = new Vector(); Collection l = new LinkedList(); for (int i = 0; i < 100; i++) l.add("Test " + i); v.addAll(l); assertTrue("Failed to add all elements", tVector.equals(v)); v.addAll(l); int vSize = tVector.size(); for (int counter = vSize - 1; counter >= 0; counter--) assertTrue("Failed to add 
elements correctly", v.get(counter) == v .get(counter + vSize)); l = new LinkedList(); l.add(null); l.add("gah"); l.add(null); tVector.addAll(l); assertNull("Wrong element at 3rd last position--wanted null", tVector .get(vSize)); assertEquals("Wrong element at 2nd last position--wanted 'gah'", "gah", tVector .get(vSize + 1)); assertNull("Wrong element at last position--wanted null", tVector .get(vSize + 2)); try { tVector.addAll(tVector.size() / 2, null); fail("NullPointerException expected"); } catch(NullPointerException e) { //expected } } /** * java.util.Vector#addElement(java.lang.Object) */ public void test_addElementLjava_lang_Object() { // Test for method void java.util.Vector.addElement(java.lang.Object) Vector v = vectorClone(tVector); v.addElement("Added Element"); assertTrue("Failed to add element", v.contains("Added Element")); assertEquals("Added Element to wrong slot", "Added Element", ((String) v.elementAt(100)) ); v.addElement(null); assertTrue("Failed to add null", v.contains(null)); assertNull("Added null to wrong slot", v.elementAt(101)); } /** * java.util.Vector#addElement(java.lang.Object) */ public void test_addElementLjava_lang_Object_subtest0() { // Test for method void java.util.Vector.addElement(java.lang.Object) Vector v = vectorClone(tVector); v.addElement("Added Element"); assertTrue("Failed to add element", v.contains("Added Element")); assertEquals("Added Element to wrong slot", "Added Element", ((String) v.elementAt(100)) ); v.addElement(null); assertTrue("Failed to add null", v.contains(null)); assertNull("Added null to wrong slot", v.elementAt(101)); } /** * java.util.Vector#capacity() */ public void test_capacity() { // Test for method int java.util.Vector.capacity() Vector v = new Vector(9); assertEquals("Incorrect capacity returned", 9, v.capacity()); } /** * java.util.Vector#clear() */ public void test_clear() { // Test for method void java.util.Vector.clear() Vector orgVector = vectorClone(tVector); tVector.clear(); 
assertEquals("a) Cleared Vector has non-zero size", 0, tVector.size()); Enumeration e = orgVector.elements(); while (e.hasMoreElements()) assertTrue("a) Cleared vector contained elements", !tVector .contains(e.nextElement())); tVector.add(null); tVector.clear(); assertEquals("b) Cleared Vector has non-zero size", 0, tVector.size()); e = orgVector.elements(); while (e.hasMoreElements()) assertTrue("b) Cleared vector contained elements", !tVector .contains(e.nextElement())); } /** * java.util.Vector#clone() */ public void test_clone() { // Test for method java.lang.Object java.util.Vector.clone() tVector.add(25, null); tVector.add(75, null); Vector v = (Vector) tVector.clone(); Enumeration orgNum = tVector.elements(); Enumeration cnum = v.elements(); while (orgNum.hasMoreElements()) { assertTrue("Not enough elements copied", cnum.hasMoreElements()); assertTrue("Vector cloned improperly, elements do not match", orgNum.nextElement() == cnum.nextElement()); } assertTrue("Not enough elements copied", !cnum.hasMoreElements()); } /** * java.util.Vector#contains(java.lang.Object) */ public void test_containsLjava_lang_Object() { // Test for method boolean java.util.Vector.contains(java.lang.Object) assertTrue("Did not find element", tVector.contains("Test 42")); assertTrue("Found bogus element", !tVector.contains("Hello")); assertTrue( "Returned true looking for null in vector without null element", !tVector.contains(null)); tVector.insertElementAt(null, 20); assertTrue( "Returned false looking for null in vector with null element", tVector.contains(null)); } /** * java.util.Vector#containsAll(java.util.Collection) */ public void test_containsAllLjava_util_Collection() { // Test for method boolean // java.util.Vector.containsAll(java.util.Collection) Collection s = new HashSet(); for (int i = 0; i < 100; i++) s.add("Test " + i); assertTrue("Returned false for valid collection", tVector .containsAll(s)); s.add(null); assertTrue("Returned true for invlaid collection 
containing null", !tVector.containsAll(s)); tVector.add(25, null); assertTrue("Returned false for valid collection containing null", tVector.containsAll(s)); s = new HashSet(); s.add(new Object()); assertTrue("Returned true for invalid collection", !tVector .containsAll(s)); try { tVector.containsAll(null); fail("NullPointerException expected"); } catch (NullPointerException e) { //expected } } /** * java.util.Vector#copyInto(java.lang.Object[]) */ public void test_copyInto$Ljava_lang_Object() { // Test for method void java.util.Vector.copyInto(java.lang.Object []) Object[] a = new Object[100]; tVector.setElementAt(null, 20); tVector.copyInto(a); for (int i = 0; i < 100; i++) assertTrue("copyInto failed", a[i] == tVector.elementAt(i)); try { tVector.copyInto(null); fail("NullPointerException expected"); } catch (NullPointerException e) { //expected } } /** * java.util.Vector#elementAt(int) */ public void test_elementAtI() { // Test for method java.lang.Object java.util.Vector.elementAt(int) assertEquals("Incorrect element returned", "Test 18", ((String) tVector .elementAt(18))); tVector.setElementAt(null, 20); assertNull("Incorrect element returned--wanted null", tVector .elementAt(20)); try { tVector.elementAt(-5); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } try { tVector.elementAt(tVector.size() + 1); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } } /** * java.util.Vector#elements() */ public void test_elements() { // Test for method java.util.Enumeration java.util.Vector.elements() tVector.insertElementAt(null, 20); Enumeration e = tVector.elements(); int i = 0; while (e.hasMoreElements()) { assertTrue("Enumeration returned incorrect element at pos: " + i, e .nextElement() == tVector.elementAt(i)); i++; } assertTrue("Invalid enumeration", i == tVector.size()); } /** * java.util.Vector#elements() */ public void test_elements_subtest0() { final 
int iterations = 10000; final Vector v = new Vector(); Thread t1 = new Thread() { public void run() { for (int i = 0; i < iterations; i++) { synchronized (v) { v.addElement(String.valueOf(i)); v.removeElementAt(0); } } } }; t1.start(); for (int i = 0; i < iterations; i++) { Enumeration en = v.elements(); try { while (true) { Object result = en.nextElement(); if (result == null) { fail("Null result: " + i); } } } catch (NoSuchElementException e) { } } } /** * java.util.Vector#ensureCapacity(int) */ public void test_ensureCapacityI() { // Test for method void java.util.Vector.ensureCapacity(int) Vector v = new Vector(9); v.ensureCapacity(20); assertEquals("ensureCapacity failed to set correct capacity", 20, v .capacity()); v = new Vector(100); assertEquals("ensureCapacity reduced capacity", 100, v.capacity()); v.ensureCapacity(150); assertEquals( "ensuieCapacity failed to set to be twice the old capacity", 200, v.capacity()); v = new Vector(9, -1); v.ensureCapacity(20); assertEquals("ensureCapacity failed to set to be minCapacity", 20, v .capacity()); v.ensureCapacity(15); assertEquals("ensureCapacity reduced capacity", 20, v.capacity()); v.ensureCapacity(35); assertEquals( "ensuieCapacity failed to set to be twice the old capacity", 40, v.capacity()); v = new Vector(9, 4); v.ensureCapacity(11); assertEquals("ensureCapacity failed to set correct capacity", 13, v .capacity()); v.ensureCapacity(5); assertEquals("ensureCapacity reduced capacity", 13, v.capacity()); v.ensureCapacity(20); assertEquals( "ensureCapacity failed to set to be twice the old capacity", 20, v.capacity()); } /** * java.util.Vector#equals(java.lang.Object) */ public void test_equalsLjava_lang_Object() { // Test for method boolean java.util.Vector.equals(java.lang.Object) Vector v = new Vector(); for (int i = 0; i < 100; i++) v.addElement("Test " + i); assertTrue("a) Equal vectors returned false", tVector.equals(v)); v.addElement(null); assertTrue("b) UnEqual vectors returned true", 
!tVector.equals(v)); tVector.addElement(null); assertTrue("c) Equal vectors returned false", tVector.equals(v)); tVector.removeElementAt(22); assertTrue("d) UnEqual vectors returned true", !tVector.equals(v)); assertTrue("e) Equal vectors returned false", tVector.equals(tVector)); assertFalse("f) UnEqual vectors returned true", tVector .equals(new Object())); assertFalse("g) Unequal vectors returned true", tVector.equals(null)); } /** * java.util.Vector#firstElement() */ public void test_firstElement() { // Test for method java.lang.Object java.util.Vector.firstElement() assertEquals("Returned incorrect firstElement", "Test 0", tVector.firstElement() ); tVector.insertElementAt(null, 0); assertNull("Returned incorrect firstElement--wanted null", tVector .firstElement()); Vector v = new Vector(10); try { v.firstElement(); fail("Should throw NoSuchElementException"); } catch (NoSuchElementException e) { // Excepted } } /** * java.util.Vector#get(int) */ public void test_getI() { // Test for method java.lang.Object java.util.Vector.get(int) assertEquals("Get returned incorrect object", "Test 80", tVector.get(80)); tVector.add(25, null); assertNull("Returned incorrect element--wanted null", tVector.get(25)); try { tVector.get(-5); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } try { tVector.get(tVector.size() + 1); fail("ArrayIndexOutOfBoundsException expected"); } catch(ArrayIndexOutOfBoundsException e) { //expected } } /** * java.util.Vector#hashCode() */ public void test_hashCode() { // Test for method int java.util.Vector.hashCode() int hashCode = 1; // one tVector.insertElementAt(null, 20); for (int i = 0; i < tVector.size(); i++) { Object obj = tVector.elementAt(i); hashCode = 31 * hashCode + (obj == null ? 0 : obj.hashCode()); } assertTrue("Incorrect hashCode returned. 
Wanted: " + hashCode + " got: " + tVector.hashCode(), tVector.hashCode() == hashCode); } /** * java.util.Vector#indexOf(java.lang.Object) */ public void test_indexOfLjava_lang_Object() { // Test for method int java.util.Vector.indexOf(java.lang.Object) assertEquals("Incorrect index returned", 10, tVector.indexOf("Test 10")); assertEquals("Index returned for invalid Object", -1, tVector .indexOf("XXXXXXXXXXX")); tVector.setElementAt(null, 20); tVector.setElementAt(null, 40); assertTrue("Incorrect indexOf returned for null: " + tVector.indexOf(null), tVector.indexOf(null) == 20); } /** * java.util.Vector#indexOf(java.lang.Object, int) */ public void test_indexOfLjava_lang_ObjectI() { // Test for method int java.util.Vector.indexOf(java.lang.Object, int) assertEquals("Failed to find correct index", tVector.indexOf("Test 98", 50), 98); assertTrue("Found index of bogus element", (tVector.indexOf( "Test 1001", 50) == -1)); tVector.setElementAt(null, 20); tVector.setElementAt(null, 40); tVector.setElementAt(null, 60); assertTrue("a) Incorrect indexOf returned for null: " + tVector.indexOf(null, 25), tVector.indexOf(null, 25) == 40); assertTrue("b) Incorrect indexOf returned for null: " + tVector.indexOf(null, 20), tVector.indexOf(null, 20) == 20); try { tVector.indexOf("Test 98", -1); fail("should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { } assertEquals(-1, tVector.indexOf("Test 98", 1000)); assertEquals(-1, tVector.indexOf("Test 98", Integer.MAX_VALUE)); assertEquals(-1, tVector.indexOf("Test 98", tVector.size())); assertEquals(98, tVector.indexOf("Test 98", 0)); try { tVector.indexOf("Test 98", Integer.MIN_VALUE); fail("should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { } } /** * java.util.Vector#insertElementAt(java.lang.Object, int) */ public void test_insertElementAtLjava_lang_ObjectI() { // Test for method void // java.util.Vector.insertElementAt(java.lang.Object, int) Vector v = 
// NOTE(review): this chunk begins mid-method — "vectorClone(tVector);" below is the tail of
// "Vector v = vectorClone(tVector);" from the preceding (not visible) insertElementAt test.
vectorClone(tVector);
        String prevElement = (String) v.elementAt(99);
        v.insertElementAt("Inserted Element", 99);
        assertEquals("Element not inserted", "Inserted Element", ((String) v.elementAt(99)));
        // the element previously at 99 must have been shifted up to 100
        assertTrue("Elements shifted incorrectly", ((String) v.elementAt(100)).equals(prevElement));
        v.insertElementAt(null, 20);
        assertNull("null not inserted", v.elementAt(20));
        // out-of-range indices must throw, for both null and non-null elements
        try { tVector.insertElementAt("Inserted Element", -1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.insertElementAt(null, -1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.insertElementAt("Inserted Element", tVector.size() + 1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.insertElementAt(null, tVector.size() + 1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#isEmpty()
     */
    public void test_isEmpty() {
        // Test for method boolean java.util.Vector.isEmpty()Vector
        Vector v = new java.util.Vector();
        assertTrue("Empty vector returned false", v.isEmpty());
        v.addElement(new Object());
        assertTrue("non-Empty vector returned true", !v.isEmpty());
    }

    /**
     * java.util.Vector#isEmpty()
     */
    public void test_isEmpty_subtest0() {
        // Races a spinning reader thread against synchronized mutation to check
        // that isEmpty() never observes a transiently-empty vector.
        final Vector v = new Vector();
        v.addElement("initial");
        Thread t1 = new Thread() {
            public void run() {
                // busy-wait until the main loop (incorrectly) lets the vector go empty
                while (!v.isEmpty()) ;
                v.addElement("final");
            }
        };
        t1.start();
        for (int i = 0; i < 10000; i++) {
            synchronized (v) {
                v.removeElementAt(0);
                v.addElement(String.valueOf(i));
            }
            int size;
            if ((size = v.size()) != 1) {
                String result = "Size is not 1: " + size + " " + v;
                // terminate the thread
                v.removeAllElements();
                fail(result);
            }
        }
        // terminate the thread
        v.removeElementAt(0);
    }

    /**
     * java.util.Vector#lastElement()
     */
    public void test_lastElement() {
        // Test for method java.lang.Object java.util.Vector.lastElement()
        assertEquals("Incorrect last element returned", "Test 99", tVector.lastElement());
        tVector.addElement(null);
        assertNull("Incorrect last element returned--wanted null", tVector.lastElement());
        // empty vector must throw rather than return a sentinel
        Vector vector = new Vector();
        try { vector.lastElement(); fail("Should throw NoSuchElementException"); } catch (NoSuchElementException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#lastIndexOf(java.lang.Object)
     */
    public void test_lastIndexOfLjava_lang_Object() {
        // Test for method int java.util.Vector.lastIndexOf(java.lang.Object)
        Vector v = new Vector(9);
        for (int i = 0; i < 9; i++)
            v.addElement("Test");
        v.addElement("z");
        assertEquals("Failed to return correct index", 8, v.lastIndexOf("Test"));
        // null search must find the LAST null (index 40, not 20)
        tVector.setElementAt(null, 20);
        tVector.setElementAt(null, 40);
        assertTrue("Incorrect lastIndexOf returned for null: " + tVector.lastIndexOf(null), tVector.lastIndexOf(null) == 40);
    }

    /**
     * java.util.Vector#lastIndexOf(java.lang.Object, int)
     */
    public void test_lastIndexOfLjava_lang_ObjectI() {
        // Test for method int java.util.Vector.lastIndexOf(java.lang.Object, int)
        assertEquals("Failed to find object", 0, tVector.lastIndexOf("Test 0", 0));
        assertTrue("Found Object outside of index", (tVector.lastIndexOf("Test 0", 10) > -1));
        tVector.setElementAt(null, 20);
        tVector.setElementAt(null, 40);
        tVector.setElementAt(null, 60);
        // a backward search from index 15 must not see the null at 20
        assertTrue("Incorrect lastIndexOf returned for null: " + tVector.lastIndexOf(null, 15), tVector.lastIndexOf(null, 15) == -1);
        assertTrue("Incorrect lastIndexOf returned for null: " + tVector.lastIndexOf(null, 45), tVector.lastIndexOf(null, 45) == 40);
        // negative start index is legal for lastIndexOf (returns -1)...
        assertEquals(-1, tVector.lastIndexOf("Test 98", -1));
        assertEquals(-1, tVector.lastIndexOf("Test 98", 0));
        // ...but an index >= size() must throw
        try { assertEquals(-1, tVector.lastIndexOf("Test 98", 1000)); fail("should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { assertEquals(-1, tVector.lastIndexOf("Test 98", Integer.MAX_VALUE)); fail("should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.lastIndexOf("Test 98", tVector.size()); fail("should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.indexOf("Test 98", Integer.MIN_VALUE); fail("should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#remove(int)
     */
    public void test_removeI() {
        // Test for method java.lang.Object java.util.Vector.remove(int)
        Object removeElement = tVector.get(36);
        Object result = tVector.remove(36);
        assertFalse("Contained element after remove", tVector.contains("Test 36"));
        assertEquals("Should return the element that was removed", removeElement, result);
        assertEquals("Failed to decrement size after remove", 99, tVector.size());
        // removing around an interleaved null: first remove shifts the null into slot 19
        tVector.add(20, null);
        removeElement = tVector.get(19);
        result = tVector.remove(19);
        assertNull("Didn't move null element over", tVector.get(19));
        assertEquals("Should return the element that was removed", removeElement, result);
        removeElement = tVector.get(19);
        result = tVector.remove(19);
        assertNotNull("Didn't remove null element", tVector.get(19));
        assertEquals("Should return the element that was removed", removeElement, result);
        assertEquals("Failed to decrement size after removing null", 98, tVector.size());
        try { tVector.remove(-1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.remove(tVector.size()); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#remove(java.lang.Object)
     */
    public void test_removeLjava_lang_Object() {
        // Test for method boolean java.util.Vector.remove(java.lang.Object)
        tVector.remove("Test 0");
        assertTrue("Contained element after remove", !tVector.contains("Test 0"));
        assertEquals("Failed to decrement size after remove", 99, tVector.size());
        tVector.add(null);
        tVector.remove(null);
        assertTrue("Contained null after remove", !tVector.contains(null));
        assertEquals("Failed to decrement size after removing null", 99, tVector.size());
    }

    /**
     * java.util.Vector#removeAll(java.util.Collection)
     */
    public void test_removeAllLjava_util_Collection() {
        // Test for method boolean java.util.Vector.removeAll(java.util.Collection)
        Vector v = new Vector();
        Collection l = new LinkedList();
        for (int i = 0; i < 5; i++)
            l.add("Test " + i);
        // the vector holds the LIST ITSELF as its single element
        v.addElement(l);
        Collection s = new HashSet();
        Object o;
        s.add(o = v.firstElement());
        v.removeAll(s);
        assertTrue("Failed to remove items in collection", !v.contains(o));
        v.removeAll(l);
        assertTrue("Failed to remove all elements", v.isEmpty());
        // removeAll with a collection containing null must remove every null element
        v.add(null);
        v.add(null);
        v.add("Boom");
        v.removeAll(s);
        assertEquals("Should not have removed any elements", 3, v.size());
        l = new LinkedList();
        l.add(null);
        v.removeAll(l);
        assertEquals("Should only have one element", 1, v.size());
        assertEquals("Element should be 'Boom'", "Boom", v.firstElement());
        try { v.removeAll(null); fail("NullPointerException expected"); } catch (NullPointerException e) { /* expected */ }
    }

    /**
     * java.util.Vector#removeAllElements()
     */
    public void test_removeAllElements() {
        // Test for method void java.util.Vector.removeAllElements()
        Vector v = vectorClone(tVector);
        v.removeAllElements();
        assertEquals("Failed to remove all elements", 0, v.size());
    }

    /**
     * java.util.Vector#removeElement(java.lang.Object)
     */
    public void test_removeElementLjava_lang_Object() {
        // Test for method boolean java.util.Vector.removeElement(java.lang.Object)
        Vector v = vectorClone(tVector);
        v.removeElement("Test 98");
        assertEquals("Element not removed", "Test 99", ((String) v.elementAt(98)));
        assertTrue("Vector is wrong size after removal: " + v.size(), v.size() == 99);
        // null is added to tVector but removed from the CLONE, so the clone's size is unchanged
        tVector.addElement(null);
        v.removeElement(null);
        assertTrue("Vector is wrong size after removing null: " + v.size(), v.size() == 99);
    }

    /**
     * java.util.Vector#removeElementAt(int)
     */
    public void test_removeElementAtI() {
        // Test for method void java.util.Vector.removeElementAt(int)
        Vector v = vectorClone(tVector);
        int size = v.size();
        v.removeElementAt(50);
        assertEquals("Failed to remove element", -1, v.indexOf("Test 50", 0));
        assertEquals("Test 51", v.get(50));
        assertEquals(size - 1, v.size());
        tVector.insertElementAt(null, 60);
        assertNull(tVector.get(60));
        size = tVector.size();
        tVector.removeElementAt(60);
        assertNotNull("Element at 60 should not be null after removal", tVector.elementAt(60));
        assertEquals(size - 1, tVector.size());
        try { tVector.removeElementAt(-1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.removeElementAt(tVector.size()); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * {@link java.util.Vector#removeRange(int, int)}
     */
    public void test_removeRange() {
        // removeRange is protected; MockVector widens it to public for testing.
        MockVector myVector = new MockVector();
        myVector.removeRange(0, 0);
        try { myVector.removeRange(0, 1); fail("Should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        int[] data = { 1, 2, 3, 4 };
        for (int i = 0; i < data.length; i++) {
            myVector.add(i, data[i]);
        }
        myVector.removeRange(0, 2);
        assertEquals(data[2], myVector.get(0));
        assertEquals(data[3], myVector.get(1));
        try { myVector.removeRange(-1, 1); fail("Should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { myVector.removeRange(0, -1); fail("Should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { myVector.removeRange(1, 0); fail("Should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
        try { myVector.removeRange(2, 1); fail("Should throw IndexOutOfBoundsException"); } catch (IndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#retainAll(java.util.Collection)
     */
    public void test_retainAllLjava_util_Collection() {
        // Test for method boolean java.util.Vector.retainAll(java.util.Collection)
        Object o = tVector.firstElement();
        tVector.add(null);
        Collection s = new HashSet();
        s.add(o);
        s.add(null);
        tVector.retainAll(s);
        assertTrue("Retained items other than specified", tVector.size() == 2 && tVector.contains(o) && tVector.contains(null));
    }

    /**
     * java.util.Vector#set(int, java.lang.Object)
     */
    public void test_setILjava_lang_Object() {
        // Test for method java.lang.Object java.util.Vector.set(int, java.lang.Object)
        Object o = new Object();
        Object previous = tVector.get(23);
        Object result = tVector.set(23, o);
        assertEquals("Should return the element previously at the specified position", previous, result);
        assertTrue("Failed to set Object", tVector.get(23) == o);
        previous = tVector.get(0);
        result = tVector.set(0, null);
        assertEquals("Should return the element previously at the specified position", previous, result);
        assertNull("Failed to set Object", tVector.get(0));
        try { tVector.set(-1, o); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.set(-1, null); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.set(tVector.size(), o); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { tVector.set(tVector.size(), null); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#setElementAt(java.lang.Object, int)
     */
    public void test_setElementAtLjava_lang_ObjectI() {
        // Test for method void java.util.Vector.setElementAt(java.lang.Object, int)
        Vector v = vectorClone(tVector);
        v.setElementAt("Inserted Element", 99);
        assertEquals("Element not set", "Inserted Element", ((String) v.elementAt(99)));
        v.setElementAt(null, 0);
        assertNull("Null element not set", v.elementAt(0));
        try { v.setElementAt("Inserted Element", -1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { v.setElementAt(null, -1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { v.setElementAt("Inserted Element", v.size()); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        try { v.setElementAt(null, v.size()); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#setSize(int)
     */
    public void test_setSizeI() {
        // Test for method void java.util.Vector.setSize(int)
        Vector v = vectorClone(tVector);
        int oldSize = v.size();
        Object preElement = v.get(10);
        // shrinking discards elements at index >= newSize
        v.setSize(10);
        assertEquals("Failed to set size", 10, v.size());
        assertEquals("All components at index newSize and greater should be discarded", -1, v.indexOf(preElement));
        try { v.get(oldSize - 1); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
        // growing pads with nulls
        oldSize = v.size();
        v.setSize(20);
        assertEquals("Failed to set size", 20, v.size());
        for (int i = oldSize; i < v.size(); i++) {
            assertNull(v.get(i));
        }
        try { v.setSize(-1); fail("Should throw ArrayIndexOutOfBoundsException"); } catch (ArrayIndexOutOfBoundsException e) { /* Expected */ }
    }

    /**
     * java.util.Vector#size()
     */
    public void test_size() {
        // Test for method int java.util.Vector.size()
        assertEquals("Returned incorrect size", 100, tVector.size());
        // concurrency check: size() must never observe 0 while the synchronized
        // remove/add pair below keeps the logical size at 1
        final Vector v = new Vector();
        v.addElement("initial");
        Thread t1 = new Thread() {
            public void run() {
                while (v.size() > 0) ;
                v.addElement("final");
            }
        };
        t1.start();
        for (int i = 0; i < 10000; i++) {
            synchronized (v) {
                v.removeElementAt(0);
                v.addElement(String.valueOf(i));
            }
            int size;
            if ((size = v.size()) != 1) {
                String result = "Size is not 1: " + size + " " + v;
                // terminate the thread
                v.removeAllElements();
                fail(result);
            }
        }
        // terminate the thread
        v.removeElementAt(0);
    }

    /**
     * java.util.Vector#subList(int, int)
     */
    public void test_subListII() {
        // Test for method java.util.List java.util.Vector.subList(int, int)
        List sl = tVector.subList(10, 25);
        assertEquals("Returned sublist of incorrect size", 15, sl.size());
        for (int i = 10; i < 25; i++)
            assertTrue("Returned incorrect sublist", sl.contains(tVector.get(i)));
        // the sublist of a Vector must itself be a synchronized random-access list
        assertEquals("Not synchronized random access", "java.util.Collections$SynchronizedRandomAccessList", sl.getClass().getName());
    }

    /**
     * java.util.Vector#toArray()
     */
    public void test_toArray() {
        // Test for method java.lang.Object [] java.util.Vector.toArray()
        assertTrue("Returned incorrect array", Arrays.equals(objArray, tVector.toArray()));
    }

    /**
     * java.util.Vector#toArray(java.lang.Object[])
     */
    public void test_toArray$Ljava_lang_Object() {
        // Test for method java.lang.Object [] java.util.Vector.toArray(java.lang.Object [])
        Object[] o = new Object[1000];
        Object f = new Object();
        for (int i = 0; i < o.length; i++)
            o[i] = f;
        tVector.toArray(o);
        // the slot just past the last copied element must be set to null per the List contract
        assertNull("Failed to set slot to null", o[100]);
        for (int i = 0; i < tVector.size(); i++)
            assertTrue("Returned incorrect array", tVector.elementAt(i) == o[i]);
    }

    // Vector subclass whose add/addElement delegate to each other's super
    // implementation, to verify neither path loses elements.
    class SubVector<E> extends Vector<E> {

        private static final long serialVersionUID = 1L;

        public SubVector() {
            super();
        }

        public synchronized boolean add(E obj) {
            super.addElement(obj);
            return true;
        }

        public synchronized void addElement(E obj) {
            super.add(obj);
        }

        /**
         * java.util.Vector#add(Object)
         */
        @SuppressWarnings("nls")
        public void test_add() {
            SubVector<String> subvector = new SubVector<String>();
            subvector.add("foo");
            subvector.addElement("bar");
            assertEquals("Expected two elements in vector", 2, subvector.size());
        }
    }

    /**
     * java.util.Vector#toString()
     */
    public void test_toString() {
        // Ensure toString works with self-referencing elements.
        Vector<Object> vec = new Vector<Object>(3);
        vec.add(null);
        vec.add(new Object());
        vec.add(vec);
        assertNotNull(vec.toString());
        // Test for method java.lang.String java.util.Vector.toString()
        assertTrue("Incorrect String returned", tVector.toString().equals(vString));
        Vector v = new Vector();
        v.addElement("one");
        v.addElement(v);
        v.addElement("3");
        // test last element
        v.addElement(v);
        String result = v.toString();
        // self-references must be rendered as "(this Collection)", not recursed into
        assertTrue("should contain self ref", result.indexOf("(this") > -1);
    }

    // equals() must not consult the (overridden, always-0) size(); compares real contents.
    public void test_override_size() throws Exception {
        Vector v = new Vector();
        Vector testv = new MockVector();
        // though size is overriden, it should passed without exception
        testv.add(1);
        testv.add(2);
        testv.clear();
        testv.add(1);
        testv.add(2);
        v.add(1);
        v.add(2);
        // RI's bug here
        assertTrue(testv.equals(v));
    }

    /**
     * java.util.Vector#trimToSize()
     */
    public void test_trimToSize() {
        // Test for method void java.util.Vector.trimToSize()
        Vector v = new Vector(10);
        v.addElement(new Object());
        v.trimToSize();
        assertEquals("Failed to trim capacity", 1, v.capacity());
    }

    // removeRange(1, 3) must drop exactly the half-open range [1, 3).
    public void test_removeRangeII() {
        MockVector mv = new MockVector();
        mv.add("First");
        mv.add("Second");
        mv.add("One more");
        mv.add("Last");
        mv.removeRange(1, 3);
        assertTrue(mv.contains("First"));
        assertFalse(mv.contains("Second"));
        assertFalse(mv.contains("One more"));
        assertTrue(mv.contains("Last"));
    }

    // Shallow copy helper used by tests that mutate the shared fixture.
    protected Vector vectorClone(Vector s) {
        return (Vector) s.clone();
    }

    // Test double: size() always reports 0, and the protected removeRange is made public.
    public class MockVector extends Vector {
        @Override
        public synchronized int size() {
            return 0;
        }

        public void removeRange(int start, int end) {
            super.removeRange(start, end);
        }
    }

    /**
     * Sets up the fixture, for example, open a network connection. This method
     * is called before a test is executed.
     */
    protected void setUp() {
        // fixture: tVector holds "Test 0".."Test 99"; objArray mirrors it
        for (int i = 0; i < 100; i++) {
            tVector.addElement("Test " + i);
        }
        objArray = new Object[100];
        for (int i = 0; i < 100; i++) {
            objArray[i] = "Test " + i;
        }
    }

    /**
     * Tears down the fixture, for example, close a network connection. This
     * method is called after a test is executed.
     */
    protected void tearDown() {
    }
}
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.kie.workbench.common.forms.integration.tests.valueprocessing;

import java.io.IOException;
import java.text.ParseException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.OffsetDateTime;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

import javax.enterprise.inject.Instance;

import com.google.common.io.Resources;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.FormValuesProcessorImpl;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.fieldProcessors.MultipleSubFormFieldValueProcessor;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.fieldProcessors.SubFormFieldValueProcessor;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.fieldProcessors.TextAreaFormFieldValueProcessor;
import org.kie.workbench.common.forms.dynamic.backend.server.context.generation.dynamic.impl.fieldProcessors.time.LocalDateFieldValueProcessor;
import org.kie.workbench.common.forms.dynamic.service.context.generation.dynamic.BackendFormRenderingContext;
import org.kie.workbench.common.forms.dynamic.service.context.generation.dynamic.FieldValueProcessor;
import org.kie.workbench.common.forms.dynamic.service.context.generation.dynamic.FormValuesProcessor;
import org.kie.workbench.common.forms.dynamic.service.shared.impl.MapModelRenderingContext;
import org.kie.workbench.common.forms.fields.test.TestMetaDataEntryManager;
import org.kie.workbench.common.forms.model.FieldDefinition;
import org.kie.workbench.common.forms.model.FormDefinition;
import org.kie.workbench.common.forms.services.backend.serialization.FormDefinitionSerializer;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FieldSerializer;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FormDefinitionSerializerImpl;
import org.kie.workbench.common.forms.services.backend.serialization.impl.FormModelSerializer;
import org.lesscss.deps.org.apache.commons.io.Charsets;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import static org.assertj.core.api.Java6Assertions.assertThat;
import static org.kie.workbench.common.forms.integration.tests.valueprocessing.TestUtils.createDate;
import static org.mockito.Mockito.when;

/**
 * Integration tests for {@link FormValuesProcessor}: round-trips form values
 * (raw model values <-> flat form-engine values) through the field value
 * processors for text areas, java.time date/time fields and (multiple) subforms.
 * Form definitions are deserialized from *.frm resources on the classpath.
 */
@RunWith(MockitoJUnitRunner.class)
public class FormValuesProcessorTest {

    private static TextAreaFormFieldValueProcessor textAreaFormFieldValueProcessor = new TextAreaFormFieldValueProcessor();
    private static MultipleSubFormFieldValueProcessor multipleSubFormFieldValueProcessor = new MultipleSubFormFieldValueProcessor();
    private static SubFormFieldValueProcessor subFormFieldValueProcessor = new SubFormFieldValueProcessor();
    private static LocalDateFieldValueProcessor localDateFieldValueProcessor = new LocalDateFieldValueProcessor();

    // mocked CDI Instance used to feed the processors under test into FormValuesProcessorImpl
    @Mock
    private Instance<FieldValueProcessor<? extends FieldDefinition, ?, ?>> instanceMock;

    private static FormDefinitionSerializer formSerializer = new FormDefinitionSerializerImpl(new FieldSerializer(),
                                                                                             new FormModelSerializer(),
                                                                                             new TestMetaDataEntryManager());

    @Mock
    private static BackendFormRenderingContext context;

    @Mock
    private static MapModelRenderingContext mapModelRenderingContext;

    private static FormDefinition textAreaTaskForm, localDateTaskForm, subformTaskForm;

    private FormValuesProcessor formValuesProcessor;

    @BeforeClass
    public static void setup() throws IOException {
        textAreaTaskForm = getFormDefinition("TextareaTask-taskform.frm");
        localDateTaskForm = getFormDefinition("LocalDateFieldTask-taskform.frm");
        subformTaskForm = getFormDefinition("SubformTask-taskform.frm");
    }

    @Test
    public void testTextAreaTaskFormValuesProcessing() throws ParseException {
        setupFormValuesProcessor(Collections.singletonList(textAreaFormFieldValueProcessor));

        final String FIELD_BINDING = "_textarea";
        //raw value of the property before it was updated
        final Date d1 = createDate("21/12/2012");
        final Map<String, Object> originalRawValues = Collections.singletonMap(FIELD_BINDING, d1);
        //values in the form updated by user
        final Map<String, Object> originalFlatValues = Collections.singletonMap(FIELD_BINDING, d1.toString());

        final Map<String, Object> writtenRawValues = formValuesProcessor.writeFormValues(textAreaTaskForm, originalFlatValues, originalRawValues, context);
        testReadingFormValues(textAreaTaskForm, writtenRawValues, originalFlatValues);
        //note: in this case it doesn't make sense to test the writeFormValues method, since it returns flatValues
        //(there is no way to reconstruct the Object back from the String value)
    }

    @Test
    public void testLocalDateFieldTaskFormValuesProcessing() {
        setupFormValuesProcessor(Collections.singletonList(localDateFieldValueProcessor));

        final String LOCAL_DATE_BINDING = "_localDate_",
                LOCAL_DATE_TIME_BINDING = "_localDateTime_",
                LOCAL_TIME_BINDING = "_localTime_",
                OFFSET_DATE_TIME_BINDING = "_offsetDateTime_";

        final LocalDate localDate1 = LocalDate.of(1989, 6, 6);
        final LocalDateTime localDateTime1 = LocalDateTime.of(2000, 5, 2, 3, 4);
        final LocalTime localTime1 = LocalTime.of(23, 15);
        //form engine does not support setting custom offset
        final ZoneOffset zoneOffset1 = OffsetDateTime.now().getOffset();
        final OffsetDateTime offsetDateTime1 = OffsetDateTime.of(localDate1, localTime1, zoneOffset1);

        final Map<String, Object> originalRawValues = new HashMap<String, Object>() {{
            put(LOCAL_DATE_BINDING, localDate1);
            put(LOCAL_DATE_TIME_BINDING, localDateTime1);
            put(LOCAL_TIME_BINDING, localTime1);
            put(OFFSET_DATE_TIME_BINDING, offsetDateTime1);
        }};

        localDateFieldValueProcessor.init();

        // read raw -> flat, write flat -> raw, then read again: all three legs must agree
        final Map<String, Object> readFlatValues = formValuesProcessor.readFormValues(localDateTaskForm, originalRawValues, context);
        final Map<String, Object> writtenRawValues = testWritingFormValues(localDateTaskForm, originalRawValues, readFlatValues, originalRawValues);
        testReadingFormValues(localDateTaskForm, writtenRawValues, readFlatValues);
    }

    @Test
    public void testSubformTaskFormValuesAreProcessedCorrectly() throws ParseException, IOException {
        setupFormValuesProcessor(Arrays.asList(subFormFieldValueProcessor, multipleSubFormFieldValueProcessor, textAreaFormFieldValueProcessor));
        setupSubformTest();

        final String SUBFORM_BINDING = "_subform",
                MULTIPLESUBFORM_BINDING = "multiplesubform";

        final List<NestedDO> originalMultipleSubformRawValues = Arrays.asList(
                new NestedDO(true, "Joseph", "Hello\n my\n name\n is Joseph\n", 15, 1.564, createDate("06/06/1989"), 10.0, "2", "one"),
                new NestedDO(false, "John", "Hello\n my\n name\n is John\n", 100, 40.5684, createDate("17/11/1989"), 26.0, "2", "two"),
                new NestedDO(true, "Martin", "Hello\n my\n name\n is Martin\n", 520, 20.1569, createDate("11/09/2011"), 49.0, "3", "three")
        );

        final SubformDO originalSubformRawValues = new SubformDO(true, "Joe", "This\n is\n not\n a joke!\n", 2, 3.14, createDate("06/06/1989"), 5.0, "2", "two", originalMultipleSubformRawValues);

        final Map<String, Object> originalRawValues = Collections.singletonMap(SUBFORM_BINDING, originalSubformRawValues);

        final List<Map<String, Object>> originalMultipleSubformFlatValues = Arrays.asList(
                initMultipleSubform(new HashMap<>(), FormFields::getFirstLineValue),
                initMultipleSubform(new HashMap<>(), FormFields::getSecondLineValue),
                initMultipleSubform(new HashMap<>(), FormFields::getThirdLineValue)
        );

        final Map<String, Object> originalSubformFlatValues = new HashMap<String, Object>() {{
            initSubform(this, FormFields::getSubformValue);
            put(MULTIPLESUBFORM_BINDING, originalMultipleSubformFlatValues);
        }};

        final Map<String, Object> originalFlatValues = Collections.singletonMap(SUBFORM_BINDING, originalSubformFlatValues);

        final Map<String, Object> writtenRawValues = testWritingFormValues(subformTaskForm, originalRawValues, originalFlatValues, originalRawValues);
        testReadingFormValues(subformTaskForm, writtenRawValues, originalFlatValues);
    }

    // Registers the nested *.frm definitions the subform processors look up through the rendering context.
    private void setupSubformTest() throws IOException {
        final String SUBFORM_ID = "f31d37ce-8155-4478-a464-456c013236c6",
                CREATIONFORM_ID = "6544f16e-c765-451e-882d-8202f6ea824c",
                EDITIONFORM_ID = "dd1451bf-4f38-495b-8f16-b74711246797";

        Map<String, FormDefinition> availableForms = new HashMap<String, FormDefinition>() {{
            put(SUBFORM_ID, getFormDefinition("Subform.frm"));
            put(CREATIONFORM_ID, getFormDefinition("CreationForm.frm"));
            put(EDITIONFORM_ID, getFormDefinition("EditionForm.frm"));
        }};

        when(context.getRenderingContext()).thenReturn(mapModelRenderingContext);
        when(mapModelRenderingContext.getAvailableForms()).thenReturn(availableForms);
        when(context.getClassLoader()).thenReturn(this.getClass().getClassLoader());
    }

    // Asserts readFormValues(raw) produces exactly the expected flat values; returns them for chaining.
    private Map<String, Object> testReadingFormValues(FormDefinition taskForm,
                                                      Map<String, Object> originalRawValues,
                                                      Map<String, Object> expectedFlatValues) {
        final Map<String, Object> originalFlatValues = formValuesProcessor.readFormValues(taskForm, originalRawValues, context);
        assertThat(originalFlatValues).isEqualTo(expectedFlatValues);
        return originalFlatValues;
    }

    // Asserts writeFormValues(flat, raw) produces exactly the expected raw values; returns them for chaining.
    private Map<String, Object> testWritingFormValues(FormDefinition taskForm,
                                                      Map<String, Object> originalRawValues,
                                                      Map<String, Object> updatedFlatValues,
                                                      Map<String, Object> expectedUpdatedRawValues) {
        final Map<String, Object> updatedRawValues = formValuesProcessor.writeFormValues(taskForm, updatedFlatValues, originalRawValues, context);
        assertThat(updatedRawValues).isEqualTo(expectedUpdatedRawValues);
        return updatedRawValues;
    }

    private static FormDefinition getFormDefinition(String formName) throws IOException {
        return formSerializer.deserialize(loadTaskForm(formName));
    }

    // Loads a *.frm resource that lives next to this test class on the classpath.
    private static String loadTaskForm(String taskForm) throws IOException {
        return Resources.toString(FormValuesProcessorTest.class.getResource(taskForm), Charsets.UTF_8);
    }

    // Wires the given processors into a fresh FormValuesProcessorImpl via the mocked Instance.
    private void setupFormValuesProcessor(List<FieldValueProcessor<? extends FieldDefinition, ?, ?>> processors) {
        when(instanceMock.iterator()).thenReturn(processors.iterator());
        formValuesProcessor = new FormValuesProcessorImpl(instanceMock);
    }

    private Map<String, Object> initMultipleSubform(Map<String, Object> map,
                                                    Function<FormFields, Object> formValueProducer) {
        for (FormEngineFields value : FormEngineFields.values()) {
            map.put(value.getBinding(), formValueProducer.apply(value));
        }
        return initSubform(map, formValueProducer);
    }

    private Map<String, Object> initSubform(Map<String, Object> map,
                                            Function<FormFields, Object> valueProducer) {
        for (SubformFields value : SubformFields.values()) {
            map.put(value.getBinding(), valueProducer.apply(value));
        }
        return map;
    }
}
/*
 * Copyright 2016 DiffPlug
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.diffplug.common.base;

import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;
import java.util.function.Consumer;

/**
 * Pipes strings to a {@code Consumer<String>} through an API similar to PrintWriter and PrintStream.
 *
 * Can present itself as an {@link OutputStream}, {@link PrintStream}, {@link Writer}, or {@link PrintWriter}.
 */
public class StringPrinter {
	// every adapter below ultimately funnels its text into this consumer
	private final Consumer<String> consumer;

	/** StringPrinter will pass all the strings it receives to the given consumer. */
	public StringPrinter(Consumer<String> consumer) {
		this.consumer = consumer;
	}

	/** Prints the string and a newline (always '\n'). */
	public void println(String line) {
		consumer.accept(line);
		consumer.accept("\n");
	}

	/** Prints the string. */
	public void print(String content) {
		consumer.accept(content);
	}

	/** Easy way to create a String using a StringPrinter. */
	public static String buildString(Consumer<StringPrinter> printer) {
		StringBuilder builder = new StringBuilder();
		printer.accept(new StringPrinter(builder::append));
		return builder.toString();
	}

	/** Easy way to create a String from a bunch of lines. */
	public static String buildStringFromLines(String... lines) {
		// presize: one char per '\n' (lines.length) plus the content itself
		int numChars = lines.length;
		for (String line : lines) {
			numChars += line.length();
		}
		StringBuilder builder = new StringBuilder(numChars);
		for (String line : lines) {
			builder.append(line);
			builder.append('\n');
		}
		return builder.toString();
	}

	/**
	 * Creates an OutputStream which will print its content to the given StringPrinter, encoding bytes according to the given Charset.
	 * Doesn't matter if you close the stream or not, because StringPrinter doesn't have a close().
	 * <p>
	 * Strings are sent to the consumer as soon as their constituent bytes are written to this OutputStream.
	 * <p>
	 * The implementation is lifted from Apache commons-io.  Many thanks to them!
	 */
	public OutputStream toOutputStream(Charset charset) {
		// malformed/unmappable input is replaced with '?' rather than throwing
		CharsetDecoder decoder = charset.newDecoder()
				.onMalformedInput(CodingErrorAction.REPLACE)
				.onUnmappableCharacter(CodingErrorAction.REPLACE)
				.replaceWith("?");

		ByteBuffer decoderIn = ByteBuffer.allocate(DECODER_BUFFER);
		CharBuffer decoderOut = CharBuffer.allocate(DECODER_BUFFER);

		return new OutputStream() {
			@Override
			public void write(final int b) throws IOException {
				write(new byte[]{(byte) b});
			}

			@Override
			public void write(byte[] b) throws IOException {
				write(b, 0, b.length);
			}

			@Override
			public void write(byte[] b, int off, int len) throws IOException {
				// feed the decoder in DECODER_BUFFER-sized chunks
				while (len > 0) {
					final int c = Math.min(len, decoderIn.remaining());
					decoderIn.put(b, off, c);
					processInput(false);
					len -= c;
					off += c;
				}
				flushOutput();
			}

			private void processInput(final boolean endOfInput) throws IOException {
				// Prepare decoderIn for reading
				decoderIn.flip();
				CoderResult coderResult;
				while (true) {
					coderResult = decoder.decode(decoderIn, decoderOut, endOfInput);
					if (coderResult.isOverflow()) {
						// decoderOut is full — drain it to the consumer and keep decoding
						flushOutput();
					} else if (coderResult.isUnderflow()) {
						// decoderIn exhausted (possibly mid-multibyte-sequence) — wait for more bytes
						break;
					} else {
						// The decoder is configured to replace malformed input and unmappable characters,
						// so we should not get here.
						throw new IOException("Unexpected coder result");
					}
				}
				// Discard the bytes that have been read
				decoderIn.compact();
			}

			private void flushOutput() throws IOException {
				if (decoderOut.position() > 0) {
					consumer.accept(new String(decoderOut.array(), 0, decoderOut.position()));
					decoderOut.rewind();
				}
			}
		};
	}

	/** Buffer size for decoding characters. */
	private static final int DECODER_BUFFER = 128;

	/** Creates a UTF-8 PrintStream which passes its content to this StringPrinter. */
	public PrintStream toPrintStream() {
		return toPrintStream(StandardCharsets.UTF_8);
	}

	/** Creates a PrintStream of the given charset, which passes its content to this StringPrinter. */
	public PrintStream toPrintStream(Charset charset) {
		// Errors.rethrow() only wraps the (impossible) UnsupportedEncodingException from the ctor
		return Errors.rethrow().get(() -> {
			return new PrintStream(toOutputStream(charset), true, charset.name());
		});
	}

	/** Creates a Writer which passes its content to this StringPrinter. */
	public Writer toWriter() {
		return new Writer() {
			@Override
			public Writer append(char c) {
				consumer.accept(new String(new char[]{c}));
				return this;
			}

			@Override
			public Writer append(CharSequence csq) {
				if (csq instanceof String) {
					consumer.accept((String) csq);
				} else {
					consumer.accept(toStringSafely(csq));
				}
				return this;
			}

			@Override
			public Writer append(CharSequence csq, int start, int end) {
				if (csq instanceof String) {
					consumer.accept(((String) csq).substring(start, end));
				} else {
					consumer.accept(toStringSafely(csq.subSequence(start, end)));
				}
				return this;
			}

			private String toStringSafely(CharSequence csq) {
				String asString = csq.toString();
				if (asString.length() == csq.length()) {
					return asString;
				} else {
					// It's pretty easy to implement CharSequence.toString() incorrectly
					// http://stackoverflow.com/a/15870428/1153071
					// but for String, we know we won't have them, thus the fast-path above
					Errors.log().accept(new IllegalArgumentException(csq.getClass() + " did not implement toString() correctly."));
					char[] chars = new char[csq.length()];
					for (int i = 0; i < chars.length; ++i) {
						chars[i] = csq.charAt(i);
					}
					return new String(chars);
				}
			}

			@Override
			public void close() throws IOException {}

			@Override
			public void flush() throws IOException {}

			@Override
			public void write(char[] cbuf, int off, int len) throws IOException {
				consumer.accept(new String(cbuf, off, len));
			}

			@Override
			public void write(String str) {
				consumer.accept(str);
			}

			@Override
			public void write(String str, int off, int len) {
				consumer.accept(str.substring(off, off + len));
			}
		};
	}

	/** Creates a PrintWriter which passes its content to this StringPrinter. */
	public PrintWriter toPrintWriter() {
		boolean autoflush = true;
		return new PrintWriter(toWriter(), autoflush);
	}

	/**
	 * Given a consumer of lines, creates a stateful consumer of strings
	 * which will combine its input until it finds a newline, and
	 * split its input when it contains multiple newlines.  Examples
	 * make this more clear:
	 * <pre>
	 * "some", "\n", "simple ", "lines", "\n" -> "some", "simple lines"
	 * "some\nsimple lines\n" -> "some", "simple lines"
	 * "no newline\nno output" -> "no newline"
	 * </pre>
	 * @param perLine a Consumer&lt;String&gt; which will receive strings which were terminated by newlines (but aren't anymore).
	 * @return a Consumer&lt;String&gt; which accepts any strings, and will feed them to perLine.
	 */
	public static Consumer<String> stringsToLines(Consumer<String> perLine) {
		// Box is a project-local mutable holder carrying the unterminated tail between calls
		Box<String> leftover = Box.of("");
		return rawString -> {
			// '\r' is stripped, so CRLF and LF both terminate lines
			rawString = leftover.get() + rawString.replace("\r", "");
			int lastIdx = 0;
			int idx = 0;
			while ((idx = rawString.indexOf('\n', lastIdx)) > -1) {
				perLine.accept(rawString.substring(lastIdx, idx));
				lastIdx = idx + 1;
			}
			leftover.set(rawString.substring(lastIdx));
		};
	}

	/** Returns a StringPrinter for {@link System#out}. */
	public static StringPrinter systemOut() {
		return new StringPrinter(System.out::print);
	}

	/** Returns a StringPrinter for {@link System#err}. */
	public static StringPrinter systemErr() {
		return new StringPrinter(System.err::print);
	}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.waveprotocol.wave.model.supplement; import org.waveprotocol.wave.model.adt.ObservableBasicMap; import org.waveprotocol.wave.model.adt.ObservableBasicSet; import org.waveprotocol.wave.model.adt.ObservableBasicValue; import org.waveprotocol.wave.model.adt.ObservableMonotonicMap; import org.waveprotocol.wave.model.adt.docbased.DocumentBasedBasicSet; import org.waveprotocol.wave.model.adt.docbased.DocumentBasedBoolean; import org.waveprotocol.wave.model.adt.docbased.DocumentBasedMonotonicMap; import org.waveprotocol.wave.model.adt.docbased.DocumentBasedMonotonicValue; import org.waveprotocol.wave.model.conversation.InboxState; import org.waveprotocol.wave.model.document.ObservableDocument; import org.waveprotocol.wave.model.document.ObservableMutableDocument; import org.waveprotocol.wave.model.document.util.DefaultDocumentEventRouter; import org.waveprotocol.wave.model.document.util.DocumentEventRouter; import org.waveprotocol.wave.model.id.WaveletId; import org.waveprotocol.wave.model.id.WaveletIdSerializer; import org.waveprotocol.wave.model.util.CopyOnWriteSet; import org.waveprotocol.wave.model.util.ReadableStringMap; import org.waveprotocol.wave.model.util.Serializer; import 
org.waveprotocol.wave.model.util.ValueUtils; import org.waveprotocol.wave.model.version.HashedVersion; import org.waveprotocol.wave.model.version.HashedVersionSerializer; import org.waveprotocol.wave.model.wave.Wavelet; import java.util.Set; /** * Implementation of the supplement ADT that uses a wavelet as the underlying * data structure. * * The wavelet uses separate documents for each of: * <ul> * <li>read-state</li> * <li>folder state</li> * <li>archiving state</li> * <li>muted state</li> * <li>abuse</li> * <li>seen-state and pending-notifications state</li> * </ul> * * The read state is tracked in an element per wavelet. That element tracks blip * read-versions, the participants read-version, and the wavelet-override * version, using the {@link DocumentBasedMonotonicMap} and * {@link DocumentBasedMonotonicValue} embeddings. Below is an example state of * the read-state document. * * <pre> * &lt;data&gt; * &lt;wavelet i=&quot;example.org!conv+root&quot;&gt; * &lt;all v=&quot;25&quot;/&gt; * &lt;participants v=&quot;12&quot;/&gt; * &lt;blip i=&quot;8fJd77*2&quot; v=&quot;7&quot;/&gt; * &lt;all v=&quot;28&quot;/&gt; * &lt;blip i=&quot;8fJd77*7&quot; v=&quot;38&quot;/&gt; * &lt;blip i=&quot;8fJd77*7&quot; v=&quot;11&quot;/&gt; * &lt;/wavelet&gt; * &lt;wavelet i=&quot;conversation/dRwppo8*34&quot;&gt; * &lt;blip i=&quot;dRwppo8*35&quot; v=&quot;4&quot;/&gt; * &lt;blip i=&quot;dRwppo8*36&quot; v=&quot;15&quot;/&gt; * &lt;/wavelet&gt; * &lt;/data&gt; * </pre> * * The interpretation of that state, as provided by * {@link DocumentBasedMonotonicMap} and {@link DocumentBasedMonotonicValue}, * is: * <ul> * <li>wavelet {@code example.org!conv+root} has a wavelet read-version of 28, a * participants read-version of 12, a read version of 7 for blip {@code * 8fJd77*2}, and a read version of 38 for blip {@code 8fJd77*7}.</li> * <li>wavelet {@code conversation/dRwppo8*34} has a read version of 4 for blip * {@code dRwppo8*35} and a read version of 15 for blip {@code 
dRwppo8*36}.</li> * </ul> * * The folder state is tracked in an element per folder, according to the * {@link DocumentBasedBasicSet} embedding. An example folder state: * * <pre> * &lt;data&gt; * &lt;folder i=&quot;3&quot;/&gt; * &lt;folder i=&quot;12&quot;/&gt; * &lt;folder i=&quot;12&quot;/&gt; * &lt;/data&gt; * </pre> * * The interpretation of that state is that the wave is in folders 3 and 12. * * The archiving state is tracked in an element per wavelet, with a map of * wavelet ids and versions. When a wave is archived, all the versions of its * conversation wavelets are saved in the archiving document. * * An example archiving state: * * <pre> * &lt;data&gt; * &lt;archive i=&quot;example.org!conv+root&quot; v=&quot;48&quot; /&gt; * &lt;archive i=&quot;conversation/dRwppo8*34&quot; v=&quot;15&quot; /&gt; * &lt;/data&gt; * </pre> * * The interpretation of that state is that the wave is * {@link InboxState#ARCHIVE archived} as long as its wavelet versions don't go * above 48 for "root" and 15 for "dRwppo8*34". * * The muted state is reflected by two boolean values, stored in two separate * documents. TODO(hearnden/flopiano): improve the handling of mute and clear to * be less wasteful. * * Example: * * <pre> * &lt;data muted=&quot;true&quot; /&gt; * </pre> * * Abuse State is managed by the {@link DocumentBasedAbuseStore} class. 
* */
public final class WaveletBasedSupplement implements ObservablePrimitiveSupplement {

  // Ids of the per-concern documents within the supplement wavelet.
  public static final String READSTATE_DOCUMENT = "m/read";
  public static final String PRESENTATION_DOCUMENT = "m/presentation";
  public static final String FOLDERS_DOCUMENT = "m/folder";
  public static final String ARCHIVING_DOCUMENT = "m/archiving";
  public static final String MUTED_DOCUMENT = "m/muted";
  public static final String CLEARED_DOCUMENT = "m/cleared";
  public static final String ABUSE_DOCUMENT = "m/abuse";
  public static final String SEEN_DOCUMENT = "m/seen";
  public static final String GADGETS_DOCUMENT = "m/gadgets";

  // Element and attribute names used inside those documents.
  public static final String SEEN_VERSION_TAG = "seen";
  public static final String NOTIFIED_VERSION_TAG = "notified";
  public static final String WAVELET_TAG = "wavelet";
  public static final String BLIP_READ_TAG = "blip";
  public static final String PARTICIPANTS_READ_TAG = "participants";
  public static final String TAGS_READ_TAG = "tags";
  public static final String WAVELET_READ_TAG = "all";
  public static final String CONVERSATION_TAG = "conversation";
  public static final String THREAD_TAG = "thread";
  public static final String BLIP_TAG = "blip";
  public static final String ARCHIVE_TAG = "archive";
  public static final String VERSION_ATTR = "v";
  public static final String ID_ATTR = "i";
  public static final String FOLDER_TAG = "folder";
  public static final String MUTED_TAG = "muted";
  public static final String MUTED_ATTR = "muted";
  public static final String CLEARED_TAG = "cleared";
  public static final String CLEARED_ATTR = "cleared";
  public static final String SIGNATURE_ATTR = "signature";
  public static final String STATE_ATTR = "state";
  public static final String NOTIFICATION_TAG = "notification";
  public static final String PENDING_NOTIFICATION_ATTR = "pending";
  public static final String GADGET_TAG = "gadget";
  public static final String PERMISSIONS_ATTR = "p";
  public static final String STATE_TAG = "state";
  public static final String NAME_ATTR = "name";
  public static final String VALUE_ATTR = "value";

  /** Collection of per-wavelet read states. */
  private final WaveletReadStateCollection<?> read;

  /** Collection of per-wavelet collapsed states. */
  private final WaveletThreadStateCollection<?> collapsed;

  /** Folder state, exposed as a set. */
  private final ObservableBasicSet<Integer> folders;

  /** Mute state, exposed as a value. */
  private final ObservableBasicValue<Boolean> muted;

  /** Archived state, per wavelet. */
  private final ObservableMonotonicMap<WaveletId, Integer> waveletArchiveVersions;

  /** Last seen version + hash signature of wavelet. */
  private final ObservableBasicMap<WaveletId, HashedVersion> seenVersion;

  /** Last notified version + hash signature of wavelet. */
  private final ObservableBasicMap<WaveletId, Integer> notifiedVersion;

  /** Notification state. */
  private final ObservableBasicValue<Boolean> pendingNotification;

  /** Raw wanted evaluation data. */
  private final ObservableAbuseStore abuseStore;

  /** Gadget states. */
  private final GadgetStateCollection<?> gadgetStates;

  /**
   * This boolean overrides {@link #waveletArchiveVersions}: whenever a new
   * archive version is added, this boolean is set to false; when the method
   * {@link #clearArchiveState()} is invoked, this value is set to true. This
   * state is not exposed, instead it is used as a way to override the
   * monotonicity of {@link #waveletArchiveVersions}.
   */
  private final ObservableBasicValue<Boolean> archiveCleared;

  // Listeners registered on this supplement; copy-on-write so listeners may be
  // added/removed while events are being broadcast.
  private final CopyOnWriteSet<Listener> listeners = CopyOnWriteSet.create();

  /** Forwards read-state and thread-state events.
*/ private final Listener forwardingListener = new Listener() { @Override public void onLastReadBlipVersionChanged(WaveletId wid, String bid, int oldVersion, int newVersion) { triggerOnLastReadBlipVersionChanged(wid, bid, oldVersion, newVersion); } @Override public void onLastReadParticipantsVersionChanged( WaveletId wid, int oldVersion, int newVersion) { triggerOnLastReadParticipantsVersionChanged(wid, oldVersion, newVersion); } @Override public void onLastReadTagsVersionChanged(WaveletId wid, int oldVersion, int newVersion) { triggerOnLastReadTagsVersionChanged(wid, oldVersion, newVersion); } @Override public void onLastReadWaveletVersionChanged(WaveletId wid, int oldVersion, int newVersion) { triggerOnLastReadWaveletVersionChanged(wid, oldVersion, newVersion); } @Override public void onThreadStateChanged(WaveletId waveletId, String threadId, ThreadState oldState, ThreadState newState) { triggerOnThreadStateChanged(waveletId, threadId, oldState, newState); } @Override public void onFollowed() { } @Override public void onUnfollowed() { } @Override public void onFollowCleared() { } @Override public void onArchiveVersionChanged(WaveletId wid, int oldVersion, int newVersion) { } @Override public void onArchiveClearChanged(boolean oldValue, boolean newValue) { } @Override public void onFolderAdded(int newFolder) { } @Override public void onFolderRemoved(int oldFolder) { } @Override public void onWantedEvaluationsChanged(WaveletId waveletId) { } @Override public void onGadgetStateChanged( String gadgetId, String key, String oldValue, String newValue) { triggerOnGadgetStateChanged(gadgetId, key, oldValue, newValue); } }; /** * Creates a supplement. 
* * @param userData wavelet to hold the supplement state */ private WaveletBasedSupplement(Wavelet userData) { folders = fungeCreateFolders(userData.getDocument(FOLDERS_DOCUMENT)); muted = fungeCreateMuted(userData.getDocument(MUTED_DOCUMENT)); waveletArchiveVersions = fungeCreateWaveletArchiveState( userData.getDocument(ARCHIVING_DOCUMENT)); archiveCleared = fungeCreateCleared(userData.getDocument(CLEARED_DOCUMENT)); ObservableDocument readState = userData.getDocument(READSTATE_DOCUMENT); read = fungeCreateReadState(readState, forwardingListener); collapsed = fungeCreateCollapsedState( userData.getDocument(PRESENTATION_DOCUMENT), forwardingListener); abuseStore = fungeCreateAbuseStore(userData.getDocument(ABUSE_DOCUMENT)); seenVersion = fungeCreateSeenVersion(userData.getDocument(SEEN_DOCUMENT)); notifiedVersion = fungeCreateNotifiedVersion(userData.getDocument(SEEN_DOCUMENT)); pendingNotification = fungeCreatePendingNotification( userData.getDocument(SEEN_DOCUMENT)); gadgetStates = fungeCreateGadgetStates(userData.getDocument(GADGETS_DOCUMENT), forwardingListener); hackCleanup(); // Cleanup before installing listeners, so we start from // clean state. installListeners(); } private void hackCleanup() { // Explicitly remove Inbox and All folder tokens if they are present. folders.remove(1); folders.remove(3); } /** * Installs listeners on the component structures, so that this object can * broadcast events. 
*/ private void installListeners() { muted.addListener(new ObservableBasicValue.Listener<Boolean>() { @Override public void onValueChanged(Boolean oldValue, Boolean newValue) { if (!ValueUtils.equal(oldValue, newValue)) { // Notify based only on new value if (newValue == null) { triggerOnFollowCleared(); } else if (newValue) { triggerOnUnfollowed(); } else { triggerOnFollowed(); } } } }); folders.addListener(new ObservableBasicSet.Listener<Integer>() { @Override public void onValueAdded(Integer newValue) { triggerOnFolderAdded(newValue); } @Override public void onValueRemoved(Integer oldValue) { triggerOnFolderRemoved(oldValue); } }); archiveCleared.addListener(new ObservableBasicValue.Listener<Boolean>() { @Override public void onValueChanged(Boolean oldValue, Boolean newValue) { if (!ValueUtils.equal(oldValue, newValue)) { triggerOnArchiveClearChanged(valueOf(oldValue), valueOf(newValue)); } } }); waveletArchiveVersions.addListener(new ObservableMonotonicMap.Listener<WaveletId, Integer>() { @Override public void onEntrySet(WaveletId waveletId, Integer oldValue, Integer newValue) { triggerOnArchiveVersionChanged(waveletId, valueOf(oldValue), valueOf(newValue)); } }); abuseStore.addListener(new ObservableAbuseStore.Listener() { @Override public void onEvaluationAdded(WantedEvaluation newEvaluation) { triggerOnWantedEvaluationAdded(newEvaluation); } }); } private static int valueOf(Integer version) { return version != null ? version : NO_VERSION; } private static boolean valueOf(Boolean value) { return value != null ? value : false; } /** * Creates a supplement. 
*
   * @param userData wavelet to hold the supplement state
   */
  public static WaveletBasedSupplement create(Wavelet userData) {
    return new WaveletBasedSupplement(userData);
  }

  //
  // Read-state concerns: all delegate to the per-wavelet read-state collection.
  //

  @Override
  public void setLastReadBlipVersion(WaveletId waveletId, String blipId, int version) {
    read.setLastReadBlipVersion(waveletId, blipId, version);
  }

  @Override
  public void setLastReadParticipantsVersion(WaveletId waveletId, int version) {
    read.setLastReadParticipantsVersion(waveletId, version);
  }

  @Override
  public void setLastReadTagsVersion(WaveletId waveletId, int version) {
    read.setLastReadTagsVersion(waveletId, version);
  }

  @Override
  public void setLastReadWaveletVersion(WaveletId waveletId, int version) {
    read.setLastReadWaveletVersion(waveletId, version);
  }

  @Override
  public void clearReadState() {
    read.clear();
  }

  @Override
  public void clearBlipReadState(WaveletId waveletId, String blipId) {
    read.clearBlipReadState(waveletId, blipId);
  }

  @Override
  public int getLastReadBlipVersion(WaveletId waveletId, String blipId) {
    return read.getLastReadBlipVersion(waveletId, blipId);
  }

  @Override
  public int getLastReadParticipantsVersion(WaveletId waveletId) {
    return read.getLastReadParticipantsVersion(waveletId);
  }

  @Override
  public int getLastReadTagsVersion(WaveletId waveletId) {
    return read.getLastReadTagsVersion(waveletId);
  }

  @Override
  public int getLastReadWaveletVersion(WaveletId waveletId) {
    return read.getLastReadWaveletVersion(waveletId);
  }

  @Override
  public Iterable<String> getReadBlips(WaveletId waveletId) {
    return read.getReadBlips(waveletId);
  }

  @Override
  public Iterable<WaveletId> getReadWavelets() {
    return read.getReadWavelets();
  }

  //
  // Thread State concerns
  //

  @Override
  public ThreadState getThreadState(WaveletId wid, String threadId) {
    return collapsed.getThreadState(wid, threadId);
  }

  @Override
  public void setThreadState(WaveletId wid, String threadId, ThreadState newState) {
    collapsed.setThreadState(wid, threadId, newState);
  }

  @Override
  public Iterable<String> getStatefulThreads(WaveletId waveletId) {
    return collapsed.getStatefulThreads(waveletId);
  }

  @Override
  public Iterable<WaveletId> getWaveletsWithThreadState() {
    return collapsed.getStatefulWavelets();
  }

  //
  // Folder concerns
  //

  @Override
  public void addFolder(int id) {
    folders.add(id);
  }

  @Override
  public void removeAllFolders() {
    folders.clear();
  }

  @Override
  public void removeFolder(int id) {
    folders.remove(id);
  }

  public Iterable<Integer> getFolders() {
    return folders.getValues();
  }

  @Override
  public boolean isInFolder(int id) {
    return folders.contains(id);
  }

  //
  // Inbox concerns
  //

  // A null cleared flag is treated the same as false (not cleared).
  private boolean isCleared() {
    Boolean value = archiveCleared.get();
    return value != null ? value : false;
  }

  // Follow state is stored inverted as the "muted" value; see inverse() below.
  @Override
  public void follow() {
    muted.set(false);
  }

  @Override
  public void unfollow() {
    muted.set(true);
  }

  @Override
  public void clearFollow() {
    muted.set(null);
  }

  @Override
  public Boolean getFollowed() {
    return inverse(muted.get());
  }

  /** Null-safe boolean negation: null stays null. */
  private static Boolean inverse(Boolean b) {
    return b != null ? !b : null;
  }

  @Override
  public int getArchiveWaveletVersion(WaveletId waveletId) {
    // The cleared flag overrides any stored archive versions.
    if (isCleared()) {
      return NO_VERSION;
    }
    Integer version = waveletArchiveVersions.get(waveletId);
    return version != null ? version : NO_VERSION;
  }

  @Override
  public void archiveAtVersion(WaveletId waveletId, int waveletVersion) {
    waveletArchiveVersions.put(waveletId, waveletVersion);
    // Archiving again revokes a previous clear.
    if (isCleared()) {
      archiveCleared.set(false);
    }
  }

  @Override
  public void clearArchiveState() {
    // TODO(user)
    // Currently it is not possible to clear a monotonic map, or to remove a
    // document.
    // Instead of actually clearing the archive state, here we set a private
    // flag to true.
    if (!isCleared()) {
      archiveCleared.set(true);
    }
  }

  @Override
  public Set<WaveletId> getSeenWavelets() {
    return seenVersion.keySet();
  }

  @Override
  public Set<WaveletId> getNotifiedWavelets() {
    return notifiedVersion.keySet();
  }

  @Override
  public HashedVersion getSeenVersion(WaveletId waveletId) {
    HashedVersion seenSignature = seenVersion.get(waveletId);
    // Never-seen wavelets report the unsigned zero version rather than null.
    if (null == seenSignature) {
      return HashedVersion.unsigned(0);
    }
    return seenSignature;
  }

  @Override
  public void setSeenVersion(WaveletId waveletId, HashedVersion signature) {
    seenVersion.put(waveletId, signature);
  }

  @Override
  public void clearSeenVersion(WaveletId waveletId) {
    seenVersion.remove(waveletId);
  }

  @Override
  public Iterable<WaveletId> getArchiveWavelets() {
    return waveletArchiveVersions.keySet();
  }

  //
  // Wanted handling - forward to abuse store.
  //

  @Override
  public Set<WantedEvaluation> getWantedEvaluations() {
    return abuseStore.getWantedEvaluations();
  }

  @Override
  public void addWantedEvaluation(WantedEvaluation evaluation) {
    abuseStore.addWantedEvaluation(evaluation);
  }

  // Notifications

  @Override
  public boolean getPendingNotification() {
    // A null flag means no pending notification.
    Boolean pending = pendingNotification.get();
    return pending != null && pending;
  }

  @Override
  public int getNotifiedVersion(WaveletId waveletId) {
    Integer version = notifiedVersion.get(waveletId);
    return version != null ?
version : NO_VERSION; } @Override public void setNotifiedVersion(WaveletId waveletId, int signature) { notifiedVersion.put(waveletId, signature); } @Override public void clearPendingNotification() { pendingNotification.set(null); } // // Gadget states // @Override public ReadableStringMap<String> getGadgetState(String gadgetId) { return gadgetStates.getGadgetState(gadgetId); } @Override public void setGadgetState(String gadgetId, String key, String value) { gadgetStates.setGadgetState(gadgetId, key, value); } // // Observable aspect // @Override public void addListener(Listener listener) { listeners.add(listener); } @Override public void removeListener(Listener listener) { listeners.remove(listener); } private void triggerOnLastReadBlipVersionChanged( WaveletId waveletId, String blipId, int oldVersion, int newVersion) { for (Listener listener : listeners) { listener.onLastReadBlipVersionChanged(waveletId, blipId, oldVersion, newVersion); } } private void triggerOnLastReadParticipantsVersionChanged( WaveletId waveletId, int oldVersion, int newVersion) { for (Listener listener : listeners) { listener.onLastReadParticipantsVersionChanged(waveletId, oldVersion, newVersion); } } private void triggerOnLastReadTagsVersionChanged( WaveletId waveletId, int oldVersion, int newVersion) { for (Listener listener : listeners) { listener.onLastReadTagsVersionChanged(waveletId, oldVersion, newVersion); } } private void triggerOnLastReadWaveletVersionChanged( WaveletId waveletId, int oldVersion, int newVersion) { for (Listener listener : listeners) { listener.onLastReadWaveletVersionChanged(waveletId, oldVersion, newVersion); } } private void triggerOnFollowed() { for (Listener listener : listeners) { listener.onFollowed(); } } private void triggerOnUnfollowed() { for (Listener listener : listeners) { listener.onUnfollowed(); } } private void triggerOnFollowCleared() { for (Listener listener : listeners) { listener.onFollowCleared(); } } private void triggerOnFolderAdded(int 
newFolder) { for (Listener listener : listeners) { listener.onFolderAdded(newFolder); } } private void triggerOnFolderRemoved(int oldFolder) { for (Listener listener : listeners) { listener.onFolderAdded(oldFolder); } } private void triggerOnArchiveVersionChanged( WaveletId waveletId, int oldVersion, int newVersion) { for (Listener listener : listeners) { listener.onArchiveVersionChanged(waveletId, oldVersion, newVersion); } } private void triggerOnArchiveClearChanged(boolean oldValue, boolean newValue) { for (Listener listener : listeners) { listener.onArchiveClearChanged(oldValue, newValue); } } private void triggerOnWantedEvaluationAdded(WantedEvaluation newEvaluation) { WaveletId waveletId = newEvaluation.getWaveletId(); if (waveletId == null) { return; } for (Listener listener : listeners) { listener.onWantedEvaluationsChanged(waveletId); } } private void triggerOnThreadStateChanged(WaveletId waveletId, String threadId, ThreadState oldState, ThreadState newState) { for (Listener listener : listeners) { listener.onThreadStateChanged(waveletId, threadId, oldState, newState); } } private void triggerOnGadgetStateChanged( String gadgetId, String key, String oldValue, String newValue) { for (Listener listener : listeners) { listener.onGadgetStateChanged(gadgetId, key, oldValue, newValue); } } // // Factory methods for component structures. // /** * Exposes a document as a boolean, suitable for holding muted state. * * @param router router for the muted document * @return muted state. */ private static <E> ObservableBasicValue<Boolean> createMuted( DocumentEventRouter<? super E, E, ?> router) { return DocumentBasedBoolean.create(router, router.getDocument().getDocumentElement(), MUTED_TAG, MUTED_ATTR); } /** * Exposes a document as a boolean, suitable for holding muted state. * * @param router router for the muted document * @return muted state. */ private static <E> ObservableBasicValue<Boolean> createCleared( DocumentEventRouter<? 
super E, E, ?> router) {
    return DocumentBasedBoolean.create(router, router.getDocument().getDocumentElement(),
        CLEARED_TAG, CLEARED_ATTR);
  }

  /**
   * Exposes a document as an integer set, suitable for holding folder state.
   *
   * @param router router for the folders document
   * @return folder state.
   */
  private static <E> ObservableBasicSet<Integer> createFolders(
      DocumentEventRouter<? super E, E, ?> router) {
    return DocumentBasedBasicSet.create(router, router.getDocument().getDocumentElement(),
        Serializer.INTEGER, FOLDER_TAG, ID_ATTR);
  }

  /**
   * Exposes a document as a map from wavelet ids to version numbers.
   *
   * @param router router for the archiving document
   * @return archiving state.
   */
  private static <E> ObservableMonotonicMap<WaveletId, Integer> createWaveletArchiveState(
      DocumentEventRouter<? super E, E, ?> router) {
    return DocumentBasedMonotonicMap.create(router, router.getDocument().getDocumentElement(),
        WaveletIdSerializer.INSTANCE, Serializer.INTEGER, ARCHIVE_TAG, ID_ATTR, VERSION_ATTR);
  }

  /**
   * Exposes a document as a boolean, suitable for holding pending-notification state.
   *
   * @param router router for the notification document
   * @return pending notification state.
   */
  private static <E> ObservableBasicValue<Boolean> createPendingNotification(
      DocumentEventRouter<? super E, E, ?> router) {
    return DocumentBasedBoolean.create(
        router, router.getDocument().getDocumentElement(), NOTIFICATION_TAG,
        PENDING_NOTIFICATION_ATTR);
  }

  /**
   * Exposes a document as a collection of per-wavelet read-state objects.
   *
   * @param router router for read-state document
   * @return wavelet read-state.
   */
  private static <E> WaveletReadStateCollection<E> createWaveletReadState(
      DocumentEventRouter<? super E, E, ?> router, Listener listener) {
    E container = router.getDocument().getDocumentElement();
    return WaveletReadStateCollection.create(router, container, listener);
  }

  /**
   * Exposes a document as a collection of per-wavelet thread-state objects.
   *
   * @param router router for the thread-state document
   * @return wavelet thread state collection.
   */
  private static <E> WaveletThreadStateCollection<E> createWaveletCollapsedState(
      DocumentEventRouter<? super E, E, ?> router, Listener listener) {
    return WaveletThreadStateCollection.create(router,
        router.getDocument().getDocumentElement(), listener);
  }

  /**
   * Exposes a document as a map of per-wavelet seen-version/seen-hash pairs.
   *
   * @param router router for the seen-state document
   * @return wavelet seen version and hash signature.
   */
  private static <E> ObservableBasicMap<WaveletId, HashedVersion> createWaveletSeenVersion(
      DocumentEventRouter<? super E, E, ?> router) {
    // Note: backed by a monotonic map, so seen versions never move backwards.
    return DocumentBasedMonotonicMap.create(router,
        router.getDocument().getDocumentElement(), WaveletIdSerializer.INSTANCE,
        HashedVersionSerializer.INSTANCE, SEEN_VERSION_TAG, ID_ATTR, SIGNATURE_ATTR);
  }

  /**
   * Exposes a document as a map of per-wavelet notified-versions.
   *
   * @param router router for the notified-state document
   * @return wavelet notified version.
   */
  private static <E> ObservableBasicMap<WaveletId, Integer> createWaveletNotifiedVersion(
      DocumentEventRouter<? super E, E, ?> router) {
    return DocumentBasedMonotonicMap.create(router,
        router.getDocument().getDocumentElement(), WaveletIdSerializer.INSTANCE,
        Serializer.INTEGER, NOTIFIED_VERSION_TAG, ID_ATTR, VERSION_ATTR);
  }

  /**
   * Exposes a document as a map of maps of gadget states.
   *
   * @param router router for the gadget state document.
   * @param listener event listener.
   * @return gadget state collection object to access gadgets states by gadget
   *         ID and state name.
   */
  private static <E> GadgetStateCollection<E> createGadgetStatesDoc(
      DocumentEventRouter<? super E, E, ?> router, Listener listener) {
    return GadgetStateCollection.create(router,
        router.getDocument().getDocumentElement(), listener);
  }

  /**
   * Exposes a document as a set of {@link WantedEvaluation}s.
   *
   * @param router abuse document
   * @return all wanted evaluations for the wave.
   */
  private static <E> ObservableAbuseStore createAbuseStore(
      DocumentEventRouter<? super E, E, ?> router) {
    return DocumentBasedAbuseStore.create(router);
  }

  //
  // Funge methods for unfurling generics, required only for Sun JDK compiler.
  //

  private static <N> ObservableBasicSet<Integer> fungeCreateFolders(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createFolders(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableBasicValue<Boolean> fungeCreateMuted(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createMuted(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableBasicValue<Boolean> fungeCreateCleared(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createCleared(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableMonotonicMap<WaveletId, Integer> fungeCreateWaveletArchiveState(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createWaveletArchiveState(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableBasicValue<Boolean> fungeCreatePendingNotification(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createPendingNotification(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> WaveletReadStateCollection<?> fungeCreateReadState(
      ObservableMutableDocument<N, ?, ?> doc, Listener listener) {
    return createWaveletReadState(DefaultDocumentEventRouter.create(doc), listener);
  }

  private static <N> WaveletThreadStateCollection<?> fungeCreateCollapsedState(
      ObservableMutableDocument<N, ?, ?> doc, Listener listener) {
    return createWaveletCollapsedState(DefaultDocumentEventRouter.create(doc), listener);
  }

  private static <N> ObservableBasicMap<WaveletId, HashedVersion> fungeCreateSeenVersion(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createWaveletSeenVersion(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableBasicMap<WaveletId, Integer> fungeCreateNotifiedVersion(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createWaveletNotifiedVersion(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> ObservableAbuseStore fungeCreateAbuseStore(
      ObservableMutableDocument<N, ?, ?> doc) {
    return createAbuseStore(DefaultDocumentEventRouter.create(doc));
  }

  private static <N> GadgetStateCollection<?> fungeCreateGadgetStates(
      ObservableMutableDocument<N, ?, ?> doc, Listener listener) {
    return createGadgetStatesDoc(DefaultDocumentEventRouter.create(doc), listener);
  }
}
/* * Copyright (C) 2013 Glyptodon LLC * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.glyptodon.guacamole.net.basic; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.util.Collection; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import javax.xml.bind.DatatypeConverter; import org.glyptodon.guacamole.GuacamoleClientException; import org.glyptodon.guacamole.GuacamoleException; import org.glyptodon.guacamole.GuacamoleUnauthorizedException; import org.glyptodon.guacamole.net.auth.AuthenticationProvider; import org.glyptodon.guacamole.net.auth.Credentials; import org.glyptodon.guacamole.net.auth.UserContext; import org.glyptodon.guacamole.net.basic.event.SessionListenerCollection; import org.glyptodon.guacamole.net.basic.properties.BasicGuacamoleProperties; import org.glyptodon.guacamole.net.event.AuthenticationFailureEvent; import org.glyptodon.guacamole.net.event.AuthenticationSuccessEvent; import org.glyptodon.guacamole.net.event.listener.AuthenticationFailureListener; import org.glyptodon.guacamole.net.event.listener.AuthenticationSuccessListener; import org.glyptodon.guacamole.properties.GuacamoleProperties; import org.glyptodon.guacamole.protocol.GuacamoleStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Abstract servlet which provides an authenticatedService() function that * is only called if the HTTP request is authenticated, or the current * HTTP session has already been authenticated. * * The user context is retrieved using the authentication provider defined in * guacamole.properties. The authentication provider has access to the request * and session, in addition to any submitted username and password, in order * to authenticate the user. * * The user context will be stored in the current HttpSession. * * Success and failure are logged. 
* * @author Michael Jumper */ public abstract class AuthenticatingHttpServlet extends HttpServlet { /** * Logger for this class. */ private Logger logger = LoggerFactory.getLogger(AuthenticatingHttpServlet.class); /** * The session attribute holding the current UserContext. */ public static final String CONTEXT_ATTRIBUTE = "GUAC_CONTEXT"; /** * The session attribute holding the credentials authorizing this session. */ public static final String CREDENTIALS_ATTRIBUTE = "GUAC_CREDS"; /** * The session attribute holding the session-scoped clipboard storage. */ public static final String CLIPBOARD_ATTRIBUTE = "GUAC_CLIP"; /** * The AuthenticationProvider to use to authenticate all requests. */ private AuthenticationProvider authProvider; /** * Whether HTTP authentication should be used (the "Authorization" header). */ private boolean useHttpAuthentication; @Override public void init() throws ServletException { // Parse Guacamole configuration try { // Get auth provider instance authProvider = GuacamoleProperties.getRequiredProperty(BasicGuacamoleProperties.AUTH_PROVIDER); // Enable HTTP auth, if requested useHttpAuthentication = GuacamoleProperties.getProperty(BasicGuacamoleProperties.ENABLE_HTTP_AUTH, false); } catch (GuacamoleException e) { logger.error("Error reading Guacamole configuration.", e); throw new ServletException(e); } } /** * Notifies all listeners in the given collection that authentication has * failed. * * @param listeners A collection of all listeners that should be notified. * @param credentials The credentials associated with the authentication * request that failed. 
*/ private void notifyFailed(Collection listeners, Credentials credentials) { // Build event for auth failure AuthenticationFailureEvent event = new AuthenticationFailureEvent(credentials); // Notify all listeners for (Object listener : listeners) { try { if (listener instanceof AuthenticationFailureListener) ((AuthenticationFailureListener) listener).authenticationFailed(event); } catch (GuacamoleException e) { logger.error("Error notifying AuthenticationFailureListener.", e); } } } /** * Notifies all listeners in the given collection that authentication was * successful. * * @param listeners A collection of all listeners that should be notified. * @param context The UserContext created as a result of authentication * success. * @param credentials The credentials associated with the authentication * request that succeeded. * @return true if all listeners are allowing the authentication success, * or if there are no listeners, and false if any listener is * canceling the authentication success. Note that once one * listener cancels, no other listeners will run. * @throws GuacamoleException If any listener throws an error while being * notified. Note that if any listener throws an * error, the success is canceled, and no other * listeners will run. */ private boolean notifySuccess(Collection listeners, UserContext context, Credentials credentials) throws GuacamoleException { // Build event for auth success AuthenticationSuccessEvent event = new AuthenticationSuccessEvent(context, credentials); // Notify all listeners for (Object listener : listeners) { if (listener instanceof AuthenticationSuccessListener) { // Cancel immediately if hook returns false if (!((AuthenticationSuccessListener) listener).authenticationSucceeded(event)) return false; } } return true; } /** * Sends an error on the given HTTP response using the information within * the given GuacamoleStatus. * * @param response The HTTP response to use to send the error. 
* @param guac_status The status to send * @param message A human-readable message that can be presented to the * user. * @throws ServletException If an error prevents sending of the error * code. */ public static void sendError(HttpServletResponse response, GuacamoleStatus guac_status, String message) throws ServletException { try { // If response not committed, send error code and message if (!response.isCommitted()) { response.addHeader("Guacamole-Status-Code", Integer.toString(guac_status.getGuacamoleStatusCode())); response.addHeader("Guacamole-Error-Message", message); response.sendError(guac_status.getHttpStatusCode()); } } catch (IOException ioe) { // If unable to send error at all due to I/O problems, // rethrow as servlet exception throw new ServletException(ioe); } } /** * Returns the credentials associated with the given session. * * @param session The session to retrieve credentials from. * @return The credentials associated with the given session. */ public static Credentials getCredentials(HttpSession session) { return (Credentials) session.getAttribute(CREDENTIALS_ATTRIBUTE); } /** * Returns the UserContext associated with the given session. * * @param session The session to retrieve UserContext from. * @return The UserContext associated with the given session. */ public static UserContext getUserContext(HttpSession session) { return (UserContext) session.getAttribute(CONTEXT_ATTRIBUTE); } /** * Returns the ClipboardState associated with the given session. If none * exists yet, one is created. * * @param session The session to retrieve the ClipboardState from. * @return The ClipboardState associated with the given session. 
*/ public static ClipboardState getClipboardState(HttpSession session) { ClipboardState clipboard = (ClipboardState) session.getAttribute(CLIPBOARD_ATTRIBUTE); if (clipboard == null) { clipboard = new ClipboardState(); session.setAttribute(CLIPBOARD_ATTRIBUTE, clipboard); } return clipboard; } /** * Returns whether the request given has updated credentials. If this * function returns false, the UserContext will not be updated. * * @param request The request to check for credentials. * @return true if the request contains credentials, false otherwise. */ protected boolean hasNewCredentials(HttpServletRequest request) { return true; } @Override protected void service(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { // Set character encoding to UTF-8 if it's not already set if(request.getCharacterEncoding() == null) { try { request.setCharacterEncoding("UTF-8"); } catch (UnsupportedEncodingException exception) { throw new ServletException(exception); } } try { // Obtain context from session HttpSession httpSession = request.getSession(true); UserContext context = getUserContext(httpSession); // If new credentials present, update/create context if (hasNewCredentials(request)) { // Retrieve username and password from parms String username = request.getParameter("username"); String password = request.getParameter("password"); // If no username/password given, try Authorization header if (useHttpAuthentication && username == null && password == null) { String authorization = request.getHeader("Authorization"); if (authorization != null && authorization.startsWith("Basic ")) { // Decode base64 authorization String basicBase64 = authorization.substring(6); String basicCredentials = new String(DatatypeConverter.parseBase64Binary(basicBase64), "UTF-8"); // Pull username/password from auth data int colon = basicCredentials.indexOf(':'); if (colon != -1) { username = basicCredentials.substring(0, colon); password = 
basicCredentials.substring(colon+1); } else logger.warn("Invalid HTTP Basic \"Authorization\" header received."); } } // end Authorization header fallback // Build credentials object Credentials credentials = new Credentials(); credentials.setSession(httpSession); credentials.setRequest(request); credentials.setUsername(username); credentials.setPassword(password); SessionListenerCollection listeners = new SessionListenerCollection(httpSession); // If no cached context, attempt to get new context if (context == null) { context = authProvider.getUserContext(credentials); // Log successful authentication if (context != null) logger.info("User \"{}\" successfully authenticated from {}.", context.self().getUsername(), request.getRemoteAddr()); } // Otherwise, update existing context else context = authProvider.updateUserContext(context, credentials); // If auth failed, notify listeners if (context == null) { logger.warn("Authentication attempt from {} for user \"{}\" failed.", request.getRemoteAddr(), credentials.getUsername()); notifyFailed(listeners, credentials); } // If auth succeeded, notify and check with listeners else if (!notifySuccess(listeners, context, credentials)) { logger.info("Successful authentication canceled by hook."); context = null; } // If auth still OK, associate context with session else { httpSession.setAttribute(CONTEXT_ATTRIBUTE, context); httpSession.setAttribute(CREDENTIALS_ATTRIBUTE, credentials); } } // end if credentials present // If no context, no authorizaton present if (context == null) throw new GuacamoleUnauthorizedException("Not authenticated"); // Allow servlet to run now that authentication has been validated authenticatedService(context, request, response); } // Catch any thrown guacamole exception and attempt to pass within the // HTTP response, logging each error appropriately. 
catch (GuacamoleClientException e) { logger.warn("Client request rejected: {}", e.getMessage()); sendError(response, e.getStatus(), e.getMessage()); } catch (GuacamoleException e) { logger.error("Internal server error.", e); sendError(response, e.getStatus(), "Internal server error."); } } /** * Function called after the credentials given in the request (if any) * are authenticated. If the current session is not associated with * valid credentials, this function will not be called. * * @param context The current UserContext. * @param request The HttpServletRequest being serviced. * @param response An HttpServletResponse which controls the HTTP response * of this servlet. * * @throws GuacamoleException If an error occurs that interferes with the * normal operation of this servlet. */ protected abstract void authenticatedService( UserContext context, HttpServletRequest request, HttpServletResponse response) throws GuacamoleException; }
package com.centurylink.mdw.service.rest; import com.centurylink.mdw.app.ApplicationContext; import com.centurylink.mdw.cli.Delete; import com.centurylink.mdw.cli.Discover; import com.centurylink.mdw.cli.Import; import com.centurylink.mdw.cli.Update; import com.centurylink.mdw.common.service.Query; import com.centurylink.mdw.common.service.ServiceException; import com.centurylink.mdw.common.service.SystemMessages; import com.centurylink.mdw.common.service.types.StatusMessage; import com.centurylink.mdw.config.PropertyManager; import com.centurylink.mdw.constant.PropertyNames; import com.centurylink.mdw.git.VersionControlGit; import com.centurylink.mdw.discovery.GitDiscoverer; import com.centurylink.mdw.model.JsonArray; import com.centurylink.mdw.model.Jsonable; import com.centurylink.mdw.model.asset.api.PackageAssets; import com.centurylink.mdw.model.asset.api.PackageList; import com.centurylink.mdw.model.system.Bulletin; import com.centurylink.mdw.model.system.SystemMessage.Level; import com.centurylink.mdw.model.user.Role; import com.centurylink.mdw.model.user.UserAction.Entity; import com.centurylink.mdw.model.user.Workgroup; import com.centurylink.mdw.service.data.user.UserGroupCache; import com.centurylink.mdw.services.AssetServices; import com.centurylink.mdw.services.ServiceLocator; import com.centurylink.mdw.services.cache.CacheRegistration; import com.centurylink.mdw.services.rest.JsonRestService; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; import io.swagger.annotations.ApiImplicitParams; import io.swagger.annotations.ApiOperation; import org.eclipse.jgit.transport.UsernamePasswordCredentialsProvider; import org.json.JSONException; import org.json.JSONObject; import javax.ws.rs.Path; import java.io.File; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.ArrayList; import java.util.List; import java.util.Map; @Path("/Assets") @Api("MDW 
assets") public class Assets extends JsonRestService { protected List<String> getRoles(String path, String method) { if (method.equals("GET")) { List<String> roles = new ArrayList<>(); if (UserGroupCache.getRole(Role.ASSET_VIEW) != null) { roles.add(Role.ASSET_VIEW); roles.add(Role.ASSET_DESIGN); roles.add(Workgroup.SITE_ADMIN_GROUP); } return roles; } else { List<String> roles = super.getRoles(path, method); roles.add(Role.ASSET_DESIGN); return roles; } } @Override protected Entity getEntity(String path, Object content, Map<String,String> headers) { return Entity.Asset; } /** * Retrieve workflow asset, package or packages * The discoveryUrl param tells us to retrieve from a remote instance. * For this case We retrieve the discovery package list on the server to avoid browser CORS complications. */ @Override @Path("/{package}/{asset}") @ApiOperation(value="Retrieve an asset, an asset package, or all the asset packages", response=PackageList.class) @ApiImplicitParams({ @ApiImplicitParam(name="discoveryUrls", paramType="query", dataType="array"), @ApiImplicitParam(name="branch", paramType="query", dataType="string"), @ApiImplicitParam(name="discoveryType", paramType="query", dataType="string"), @ApiImplicitParam(name="groupId", paramType="query", dataType="string"), @ApiImplicitParam(name="archiveDirs", paramType="query", dataType="string")}) public JSONObject get(String path, Map<String,String> headers) throws ServiceException, JSONException { try { AssetServices assetServices = ServiceLocator.getAssetServices(); Query query = getQuery(path, headers); String discoveryType = query.getFilter("discoveryType"); if (discoveryType != null) { if (!discoveryType.isEmpty() && discoveryType.equals("central")) { String groupId = query.getFilter("groupId"); try { Discover discover = new Discover(groupId, true); return discover.run().getPackages(); } catch (JSONException e) { throw new ServiceException(ServiceException.INTERNAL_ERROR, "Invalid response from maven central search 
query", e); } } else if ("git".equals(discoveryType)){ String[] repoUrls = query.getArrayFilter("discoveryUrls"); String branch = query.getFilter("branch"); if (branch != null) return assetServices.discoverGitAssets(repoUrls[0], branch); else return assetServices.getGitBranches(repoUrls); } } String pkg = getSegment(path, 1); String asset = pkg == null ? null : getSegment(path, 2); String version = getSegment(path, 3); if (pkg == null) { if (query.hasFilters() && !query.getBooleanFilter("packageList")) { return assetServices.getAssetPackageList(query).getJson(); } else { return getPackages(query).getJson(); } } else { if (asset == null) { return getPackage(pkg).getJson(); } else { return getAssetInfo(pkg + "/" + asset, version, query.getFilter("stagingUser")).getJson(); } } } catch (ServiceException ex) { throw ex; } catch (Exception ex) { throw new ServiceException(ex.getMessage(), ex); } } /** * Import discovered assets. */ @Override @Path("/packages") @ApiOperation(value="Import discovered asset packages", response=StatusMessage.class) @ApiImplicitParams({ @ApiImplicitParam(name="discoveryUrl", paramType="query", dataType="string"), @ApiImplicitParam(name="branch", paramType="query", dataType="string"), @ApiImplicitParam(name="discoveryType", paramType="query", required=true), @ApiImplicitParam(name="groupId", paramType="query", dataType="string"), @ApiImplicitParam(name="packages", paramType="body", required=true, dataType="List")}) public JSONObject put(String path, JSONObject content, Map<String,String> headers) throws ServiceException, JSONException { Query query = getQuery(path, headers); String discoveryType = query.getFilter("discoveryType"); if (discoveryType == null) throw new ServiceException(ServiceException.BAD_REQUEST, "Missing param: discoveryType"); List<String> pkgs = new JsonArray(content.getJSONArray("packages")).getList(); Query discQuery = new Query(path); discQuery.setArrayFilter("packages", pkgs.toArray(new String[0])); File assetRoot = 
ApplicationContext.getAssetRoot(); Bulletin bulletin = null; try { // central discovery if (!discoveryType.isEmpty() && discoveryType.equals("central")) { String groupId = query.getFilter("groupId"); if (groupId == null) throw new ServiceException(ServiceException.BAD_REQUEST, "Missing param: groupId"); bulletin = SystemMessages.bulletinOn("Asset import in progress..."); Import importer = new Import(groupId, pkgs); importer.setAssetLoc(assetRoot.getPath()); importer.setForce(true); importer.run(); SystemMessages.bulletinOff(bulletin, "Asset import completed"); bulletin = null; Thread thread = new Thread() { public void run() { this.setName("AssetsCacheRefresh-thread"); CacheRegistration.getInstance().refreshCaches(); } }; thread.start(); } else { String discoveryUrl = query.getFilter("discoveryUrl"); if ("https://github.com/CenturyLinkCloud/mdw.git".equals(discoveryUrl)) { Update update = new Update(null); update.setAssetLoc(assetRoot.getPath()); update.setBaseAssetPackages(pkgs); update.setMdwVersion(ApplicationContext.getMdwVersion()); update.run(); } else { String branch = query.getFilter("branch"); if (branch == null) throw new ServiceException(ServiceException.BAD_REQUEST, "Missing param: groupId"); File gitLocalPath = new File(PropertyManager.getProperty(PropertyNames.MDW_GIT_LOCAL_PATH)); boolean fetch = PropertyManager.getBooleanProperty(PropertyNames.MDW_GIT_FETCH, !ApplicationContext.isDevelopment()); VersionControlGit vcGit = new VersionControlGit(fetch); AssetServices assetServices = ServiceLocator.getAssetServices(); GitDiscoverer discoverer = assetServices.getDiscoverer(discoveryUrl); discoverer.setRef(branch); String assetPath = discoverer.getAssetPath(); if (discoveryUrl.indexOf('?') != -1) discoveryUrl = discoveryUrl.substring(0, discoveryUrl.indexOf('?')); URL url = new URL(discoveryUrl); String[] userInfo = url.getUserInfo().split(":"); File test = new File(gitLocalPath.getAbsolutePath()).getParentFile(); File tempfile = null; if (test != null) { 
if (gitLocalPath.getPath().length() <= 3) test = new File(gitLocalPath.getAbsolutePath()).getParentFile().getParentFile().getParentFile(); tempfile = new File(test.getAbsolutePath() + "/" + "mdw_git_discovery_" + java.lang.System.currentTimeMillis()); } vcGit.connect(discoveryUrl, null, null, tempfile); vcGit.setCredentialsProvider(new UsernamePasswordCredentialsProvider(userInfo[0], userInfo[1])); vcGit.clone(branch, null); for (String pkg : pkgs) { String pkgPath = pkg.replace(".", "/"); if (tempfile != null) { String src = tempfile.getAbsolutePath() + "/" + assetPath + "/" + pkgPath; String dest = ApplicationContext.getAssetRoot().getAbsolutePath() + "/" + pkgPath; Files.createDirectories(Paths.get(dest)); Files.move(Paths.get(src), Paths.get(dest), StandardCopyOption.REPLACE_EXISTING); } } new Delete(tempfile).run(); } } } catch (Exception ex) { SystemMessages.bulletinOff(bulletin, Level.Error, "Asset import failed: " + ex.getMessage()); throw new ServiceException(ServiceException.INTERNAL_ERROR, ex.getMessage(), ex); } return null; } /** * TODO: Content is ignored, and an empty asset is always created. 
*/ @Override public JSONObject post(String path, JSONObject content, Map<String, String> headers) throws ServiceException, JSONException { String[] segments = getSegments(path); Query query = getQuery(path, headers); String stagingCuid = query.getFilter("stagingUser"); if (segments.length == 2) { // create package if (stagingCuid != null) { ServiceLocator.getStagingServices().createPackage(stagingCuid, segments[1]); } else { ServiceLocator.getAssetServices().createPackage(segments[1]); CacheRegistration.getInstance().refreshCache("PackageCache"); } } else if (segments.length == 3) { // create asset String asset = segments[1] + '/' + segments[2]; if (segments[2].endsWith(".proc") && stagingCuid == null) { if (query.getFilter("template") == null) query.setFilter("template", "new"); ServiceLocator.getWorkflowServices().createProcess(asset, query); } else { String template = query.getFilter("template"); if (stagingCuid != null) { ServiceLocator.getStagingServices().createAsset(stagingCuid, asset, template); } else { ServiceLocator.getAssetServices().createAsset(asset); } } } else { throw new ServiceException(ServiceException.BAD_REQUEST, "Invalid path: " + path); } return null; } @Override @Path("/{package}/{asset}") @ApiOperation(value="Delete an asset or an asset package", response=PackageList.class) public JSONObject delete(String path, JSONObject content, Map<String,String> headers) throws ServiceException, JSONException { String[] segments = getSegments(path); String stagingCuid = getQuery(path, headers).getFilter("stagingUser"); if (segments.length == 2) { if (stagingCuid == null) { ServiceLocator.getAssetServices().deletePackage(segments[1]); } } else if (segments.length == 3) { String assetPath = segments[1] + '/' + segments[2]; if (stagingCuid == null) { ServiceLocator.getAssetServices().deleteAsset(assetPath); } else { ServiceLocator.getStagingServices().deleteAsset(stagingCuid, assetPath); } } else { throw new ServiceException(ServiceException.BAD_REQUEST, 
"Invalid path: " + path); } return null; } public PackageList getPackages(Query query) throws ServiceException { String withVcsInfo = query.getFilter("withVcsInfo"); boolean withVcs = withVcsInfo == null || Boolean.parseBoolean(withVcsInfo); String stagingCuid = query.getFilter("stagingUser"); if (stagingCuid != null) { return ServiceLocator.getStagingServices().getPackages(stagingCuid, withVcs); } else { return ServiceLocator.getAssetServices().getPackages(withVcs); } } @Path("/{package}") public PackageAssets getPackage(String pkg) throws ServiceException { PackageAssets pkgAssets = ServiceLocator.getAssetServices().getAssets(pkg, true); if (pkgAssets == null) throw new ServiceException(ServiceException.NOT_FOUND, "No such package: " + pkg); else return pkgAssets; } public Jsonable getAssetInfo(String assetPath, String version, String stagingCuid) throws ServiceException { Jsonable theAsset; if (stagingCuid != null) { theAsset = ServiceLocator.getStagingServices().getStagedAsset(stagingCuid, assetPath); } else if (version != null){ theAsset = ServiceLocator.getDesignServices().getAsset(assetPath, version, true); } else { theAsset = ServiceLocator.getAssetServices().getAsset(assetPath, true); } if (theAsset == null) throw new ServiceException(ServiceException.NOT_FOUND, "No such asset: " + assetPath); else return theAsset; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Alexander T. Simbirtsev * Created on 03.12.2004 */ package javax.swing.border; import java.awt.Color; import java.awt.Insets; import javax.swing.JComponent; import javax.swing.JPanel; import javax.swing.SwingTestCase; public class BevelBorderTest extends SwingTestCase { public static void main(final String[] args) { junit.textui.TestRunner.run(BevelBorderTest.class); } /* * Class under test for void BevelBorder(int) */ public void testBevelBorderint() { int bevelType = BevelBorder.LOWERED; Color highlightColor = null; Color shadowColor = null; BevelBorder border = new BevelBorder(bevelType); assertEquals("highlightOuterColor coinsides", highlightColor, border .getHighlightOuterColor()); assertEquals("highlightInnerColor coinsides", highlightColor, border .getHighlightInnerColor()); assertEquals("shadowOuterColor coinsides", shadowColor, border.getShadowOuterColor()); assertEquals("shadowInnerColor coinsides", shadowColor, border.getShadowInnerColor()); assertEquals("Bevel type coinsides", bevelType, border.getBevelType()); bevelType = BevelBorder.RAISED; border = new BevelBorder(bevelType); assertEquals("highlightOuterColor coinsides", highlightColor, border .getHighlightOuterColor()); 
assertEquals("highlightInnerColor coinsides", highlightColor, border .getHighlightInnerColor()); assertEquals("shadowOuterColor coinsides", shadowColor, border.getShadowOuterColor()); assertEquals("shadowInnerColor coinsides", shadowColor, border.getShadowInnerColor()); assertEquals("Bevel type coinsides", bevelType, border.getBevelType()); } /* * Class under test for void BevelBorder(int, Color, Color) */ public void testBevelBorderintColorColor() { int bevelType = BevelBorder.LOWERED; Color highlightColor = Color.RED; Color shadowColor = Color.GREEN; BevelBorder border = new BevelBorder(bevelType, highlightColor, shadowColor); assertEquals("highlightOuterColor coinsides", highlightColor, border .getHighlightOuterColor()); assertEquals("highlightInnerColor coinsides", highlightColor, border .getHighlightInnerColor()); assertEquals("shadowOuterColor coinsides", shadowColor, border.getShadowOuterColor()); assertEquals("shadowInnerColor coinsides", shadowColor, border.getShadowInnerColor()); assertEquals("Bevel type coinsides", bevelType, border.getBevelType()); bevelType = BevelBorder.RAISED; highlightColor = Color.YELLOW; shadowColor = Color.WHITE; border = new BevelBorder(bevelType, highlightColor, shadowColor); assertEquals("highlightOuterColor coinsides", highlightColor, border .getHighlightOuterColor()); assertEquals("highlightInnerColor coinsides", highlightColor, border .getHighlightInnerColor()); assertEquals("shadowOuterColor coinsides", shadowColor, border.getShadowOuterColor()); assertEquals("shadowInnerColor coinsides", shadowColor, border.getShadowInnerColor()); assertEquals("Bevel type coinsides", bevelType, border.getBevelType()); } /* * Class under test for void BevelBorder(int, Color, Color, Color, Color) */ public void testBevelBorderintColorColorColorColor() { int bevelType = BevelBorder.LOWERED; Color highlightOuterColor = Color.RED; Color highlightInnerColor = Color.YELLOW; Color shadowOuterColor = Color.GREEN; Color shadowInnerColor = 
Color.BLACK;
        BevelBorder border = new BevelBorder(bevelType, highlightOuterColor, highlightInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("highlightOuterColor coinsides", highlightOuterColor, border .getHighlightOuterColor());
        assertEquals("highlightInnerColor coinsides", highlightInnerColor, border .getHighlightInnerColor());
        assertEquals("shadowOuterColor coinsides", shadowOuterColor, border .getShadowOuterColor());
        assertEquals("shadowInnerColor coinsides", shadowInnerColor, border .getShadowInnerColor());
        assertEquals("Bevel type coinsides", bevelType, border.getBevelType());
        // Second round with the RAISED type and a different color set: the
        // five-arg constructor must store every color verbatim.
        bevelType = BevelBorder.RAISED;
        highlightOuterColor = Color.YELLOW;
        highlightInnerColor = Color.RED;
        shadowOuterColor = Color.WHITE;
        shadowInnerColor = Color.BLUE;
        border = new BevelBorder(bevelType, highlightOuterColor, highlightInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("highlightOuterColor coinsides", highlightOuterColor, border .getHighlightOuterColor());
        assertEquals("highlightInnerColor coinsides", highlightInnerColor, border .getHighlightInnerColor());
        assertEquals("shadowOuterColor coinsides", shadowOuterColor, border .getShadowOuterColor());
        assertEquals("shadowInnerColor coinsides", shadowInnerColor, border .getShadowInnerColor());
        assertEquals("Bevel type coinsides", bevelType, border.getBevelType());
    }

    // NOTE(review): the assert messages throughout this class misspell
    // "coincides" as "coinsides"/"coinside". They are runtime strings, so they
    // are left untouched here; fixing them would be a separate behavior change.

    /*
     * Class under test for Insets getBorderInsets(Component, Insets)
     *
     * BevelBorder insets are always 2 on every side, regardless of the
     * component passed in or of any border installed on that component.
     */
    public void testGetBorderInsetsComponentInsets() {
        int thickness1 = 2;
        int thickness2 = 22;
        int thickness3 = 33;
        BevelBorder border = new BevelBorder(BevelBorder.RAISED, Color.black, Color.white);
        Insets insets = new Insets(1, 1, 1, 1);
        JPanel panel = new JPanel();
        // The passed-in Insets object must be overwritten in place with 2s.
        border.getBorderInsets(panel, insets);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
        // A border installed on the component must not influence the result.
        panel.setBorder(new LineBorder(Color.black, thickness2));
        border.getBorderInsets(panel, insets);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
        insets = new Insets(thickness3, thickness3, thickness3, thickness3);
        panel.setBorder(new BevelBorder(BevelBorder.LOWERED));
        // The returned object is the same (mutated) Insets: both the return
        // value and the argument must show the border's own thickness.
        Insets newInsets = border.getBorderInsets(panel, insets);
        assertEquals("insets values coinside", thickness1, newInsets.top);
        assertEquals("insets values coinside", thickness1, newInsets.left);
        assertEquals("insets values coinside", thickness1, newInsets.right);
        assertEquals("insets values coinside", thickness1, newInsets.bottom);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
    }

    /*
     * Class under test for Insets getBorderInsets(Component)
     *
     * Same invariant as above (always 2 per side), including a null component.
     */
    public void testGetBorderInsetsComponent() {
        int thickness1 = 2;
        int thickness2 = 22;
        int thickness3 = 33;
        BevelBorder border = new BevelBorder(BevelBorder.RAISED, Color.black, Color.white);
        Insets insets = new Insets(1, 1, 1, 1);
        JPanel panel = new JPanel();
        // null component is accepted; the result is the border's own insets.
        insets = border.getBorderInsets(null);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
        panel.setBorder(new LineBorder(Color.black, thickness2));
        insets = border.getBorderInsets(panel);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
        insets = new Insets(thickness3, thickness3, thickness3, thickness3);
        panel.setBorder(new BevelBorder(BevelBorder.LOWERED));
        insets = border.getBorderInsets(panel);
        assertEquals("insets values coinside", thickness1, insets.top);
        assertEquals("insets values coinside", thickness1, insets.left);
        assertEquals("insets values coinside", thickness1, insets.right);
        assertEquals("insets values coinside", thickness1, insets.bottom);
    }

    /**
     * This method is being tested by testPaintBorder()
     */
    public void testPaintRaisedBevel() {
    }

    /**
     * This method is being tested by testPaintBorder()
     */
    public void testPaintLoweredBevel() {
    }

    // Visual/manual test only: the interactive harness below is intentionally
    // disabled because it opens a frame and spins until it is closed.
    public void testPaintBorder() {
        // JPanel panel1 = new JPanel();
        // JPanel panel2 = new JPanel();
        // JPanel panel3 = new JPanel();
        //
        // Color shadowInnerColor = Color.GREEN;
        // Color shadowOuterColor = Color.BLACK;
        // Color highlightedInnerColor = Color.RED;
        // Color highlightedOuterColor = Color.BLUE;
        //
        // BevelBorder border1 = new BevelBorder(EtchedBorder.LOWERED, highlightedOuterColor, highlightedInnerColor,
        // shadowOuterColor, shadowInnerColor);
        // BevelBorder border2 = new BevelBorder(EtchedBorder.RAISED, highlightedOuterColor, highlightedInnerColor,
        // shadowOuterColor, shadowInnerColor);
        // panel2.setBorder(border1);
        // panel3.setBorder(border2);
        // panel2.setPreferredSize(new Dimension(200, 150));
        // panel3.setPreferredSize(new Dimension(200, 150));
        //
        // panel1.setLayout(new BoxLayout(panel1, BoxLayout.X_AXIS));
        // panel1.add(panel2);
        // panel1.add(panel3);
        //
        // JFrame frame = new JFrame();
        // frame.getContentPane().add(panel1);
        // frame.pack();
        // frame.show();
        // while(!frame.isActive());
        // while(frame.isActive());
    }

    // BevelBorder reports itself opaque for both LOWERED and RAISED types.
    public void testIsBorderOpaque() {
        Color shadowColor = Color.GREEN;
        Color highlightedColor = Color.RED;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedColor, shadowColor);
        assertTrue("BevelBorder is opaque", border.isBorderOpaque());
        border = new BevelBorder(BevelBorder.RAISED);
        assertTrue("BevelBorder is opaque", border.isBorderOpaque());
    }

    /*
     * Class under test for Color getShadowOuterColor(Component)
     *
     * With explicit colors the component is ignored; with the default
     * constructor the color is derived from the component's background
     * (the expected constants below encode that derivation).
     */
    public void testGetShadowOuterColorComponent() {
        JComponent c1 = new JPanel();
        JComponent c2 = new JPanel();
        c1.setBackground(new Color(110, 110, 110));
        c1.setForeground(new Color(210, 210, 210));
        c2.setBackground(new Color(10, 10, 10));
        c2.setForeground(new Color(110, 110, 110));
        Color shadowInnerColor = Color.GREEN;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.YELLOW;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Outer color coinsides", shadowOuterColor, border .getShadowOuterColor(c1));
        assertEquals("Shadow Outer color coinsides", shadowOuterColor, border .getShadowOuterColor(c2));
        border = new BevelBorder(BevelBorder.RAISED);
        assertEquals("Shadow Outer color coinsides", new Color(53, 53, 53), border .getShadowOuterColor(c1));
        assertEquals("Shadow Outer color coinsides", new Color(4, 4, 4), border .getShadowOuterColor(c2));
    }

    /*
     * Class under test for Color getShadowInnerColor(Component)
     */
    public void testGetShadowInnerColorComponent() {
        JComponent c1 = new JPanel();
        JComponent c2 = new JPanel();
        c1.setBackground(new Color(110, 110, 110));
        c1.setForeground(new Color(210, 210, 210));
        c2.setBackground(new Color(10, 10, 10));
        c2.setForeground(new Color(110, 110, 110));
        Color shadowInnerColor = Color.GREEN;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.YELLOW;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Inner color coinsides", shadowInnerColor, border .getShadowInnerColor(c1));
        assertEquals("Shadow Inner color coinsides", shadowInnerColor, border .getShadowInnerColor(c2));
        border = new BevelBorder(BevelBorder.RAISED);
        assertEquals("Shadow Inner color coinsides", new Color(77, 77, 77), border .getShadowInnerColor(c1));
        assertEquals("Shadow Inner color coinsides", new Color(7, 7, 7), border .getShadowInnerColor(c2));
    }

    /*
     * Class under test for Color getHighlightOuterColor(Component)
     */
    public void testGetHighlightOuterColorComponent() {
        JComponent c1 = new JPanel();
        JComponent c2 = new JPanel();
        c1.setBackground(new Color(110, 110, 110));
        c1.setForeground(new Color(210, 210, 210));
        c2.setBackground(new Color(10, 10, 10));
        c2.setForeground(new Color(110, 110, 110));
        Color shadowInnerColor = Color.GREEN;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.YELLOW;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlight Outer color coinsides", highlightedOuterColor, border .getHighlightOuterColor(c1));
        assertEquals("Highlight Outer color coinsides", highlightedOuterColor, border .getHighlightOuterColor(c2));
        border = new BevelBorder(BevelBorder.RAISED);
        assertEquals("Highlight Outer color coinsides", new Color(224, 224, 224), border .getHighlightOuterColor(c1));
        assertEquals("Highlight Outer color coinsides", new Color(20, 20, 20), border .getHighlightOuterColor(c2));
    }

    /*
     * Class under test for Color getHighlightInnerColor(Component)
     */
    public void testGetHighlightInnerColorComponent() {
        JComponent c1 = new JPanel();
        JComponent c2 = new JPanel();
        c1.setBackground(new Color(110, 110, 110));
        c1.setForeground(new Color(210, 210, 210));
        c2.setBackground(new Color(10, 10, 10));
        c2.setForeground(new Color(110, 110, 110));
        Color shadowInnerColor = Color.GREEN;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.YELLOW;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlight Inner color coinsides", highlightedInnerColor, border .getHighlightInnerColor(c1));
        assertEquals("Highlight Inner color coinsides", highlightedInnerColor, border .getHighlightInnerColor(c2));
        border = new BevelBorder(BevelBorder.RAISED);
        assertEquals("Highlight Inner color coinsides", new Color(157, 157, 157), border .getHighlightInnerColor(c1));
        assertEquals("Highlight Inner color coinsides", new Color(14, 14, 14), border .getHighlightInnerColor(c2));
    }

    /*
     * Class under test for Color getShadowOuterColor()
     */
    public void testGetShadowOuterColor() {
        Color shadowInnerColor = Color.YELLOW;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.GRAY;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Outer color coinsides", shadowOuterColor, border .getShadowOuterColor());
        shadowOuterColor = Color.GREEN;
        highlightedOuterColor = Color.WHITE;
        border = new BevelBorder(BevelBorder.RAISED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Outer color coinsides", shadowOuterColor, border .getShadowOuterColor());
    }

    /*
     * Class under test for Color getShadowInnerColor()
     */
    public void testGetShadowInnerColor() {
        Color shadowInnerColor = Color.YELLOW;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.GRAY;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Inner color coinsides", shadowInnerColor, border .getShadowInnerColor());
        shadowInnerColor = Color.GREEN;
        highlightedInnerColor = Color.WHITE;
        border = new BevelBorder(BevelBorder.RAISED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Shadow Inner color coinsides", shadowInnerColor, border .getShadowInnerColor());
    }

    /*
     * Class under test for Color getHighlightOuterColor()
     */
    public void testGetHighlightOuterColor() {
        Color shadowInnerColor = Color.YELLOW;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.GRAY;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlighted Outer color coinsides", highlightedOuterColor, border .getHighlightOuterColor());
        shadowOuterColor = Color.GREEN;
        highlightedOuterColor = Color.WHITE;
        border = new BevelBorder(BevelBorder.RAISED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlighted Outer color coinsides", highlightedOuterColor, border .getHighlightOuterColor());
    }

    /*
     * Class under test for Color getHighlightInnerColor()
     *
     * NOTE(review): both borders here use LOWERED (the sibling tests switch to
     * RAISED for the second case) — presumably a copy-paste slip, but the
     * assertion is valid either way; confirm intent before changing.
     */
    public void testGetHighlightInnerColor() {
        Color shadowInnerColor = Color.YELLOW;
        Color shadowOuterColor = Color.CYAN;
        Color highlightedInnerColor = Color.RED;
        Color highlightedOuterColor = Color.GRAY;
        BevelBorder border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlighted Inner color coinsides", highlightedInnerColor, border .getHighlightInnerColor());
        shadowInnerColor = Color.GREEN;
        highlightedInnerColor = Color.WHITE;
        border = new BevelBorder(BevelBorder.LOWERED, highlightedOuterColor, highlightedInnerColor, shadowOuterColor, shadowInnerColor);
        assertEquals("Highlighted Inner color coinsides", highlightedInnerColor, border .getHighlightInnerColor());
    }

    public void testGetBevelType() {
        int bevelType = BevelBorder.LOWERED;
        BevelBorder border = new BevelBorder(bevelType);
        assertEquals("Bevel type coinsides", bevelType, border.getBevelType());
        bevelType = BevelBorder.RAISED;
        border = new BevelBorder(bevelType);
        // NOTE(review): "Etch type" is a copy-paste from the EtchedBorder test;
        // the message should read "Bevel type". Left unchanged here because
        // assert messages are runtime strings.
        assertEquals("Etch type coinsides", bevelType, border.getBevelType());
    }

    // Round-trips two fully-specified borders through serializeObject (test
    // harness helper) and checks every accessor survives deserialization.
    public void testReadWriteObject() throws Exception {
        Color shadowInnerColor1 = Color.RED;
        Color shadowInnerColor2 = Color.BLUE;
        Color shadowOuterColor1 = Color.CYAN;
        Color shadowOuterColor2 = Color.MAGENTA;
        Color highlightedInnerColor1 = Color.YELLOW;
        Color highlightedInnerColor2 = Color.GREEN;
        Color highlightedOuterColor1 = Color.DARK_GRAY;
        Color highlightedOuterColor2 = Color.LIGHT_GRAY;
        int bevelType1 = BevelBorder.LOWERED;
        int bevelType2 = BevelBorder.RAISED;
        BevelBorder border1 = new BevelBorder(bevelType1, highlightedOuterColor1, highlightedInnerColor1, shadowOuterColor1, shadowInnerColor1);
        BevelBorder border2 = new BevelBorder(bevelType2, highlightedOuterColor2, highlightedInnerColor2, shadowOuterColor2, shadowInnerColor2);
        BevelBorder resurrectedBorder = (BevelBorder) serializeObject(border1);
        assertNotNull(resurrectedBorder);
        assertEquals("Deserialized values coinsides", resurrectedBorder.getBevelType(), border1 .getBevelType());
        assertEquals("Deserialized values coinsides", resurrectedBorder.getShadowInnerColor(), border1.getShadowInnerColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder.getShadowOuterColor(), border1.getShadowOuterColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder .getHighlightInnerColor(), border1.getHighlightInnerColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder .getHighlightOuterColor(), border1.getHighlightOuterColor());
        resurrectedBorder = (BevelBorder) serializeObject(border2);
        assertNotNull(resurrectedBorder);
        assertEquals("Deserialized values coinsides", resurrectedBorder.getBevelType(), border2 .getBevelType());
        assertEquals("Deserialized values coinsides", resurrectedBorder.getShadowInnerColor(), border2.getShadowInnerColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder.getShadowOuterColor(), border2.getShadowOuterColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder .getHighlightInnerColor(), border2.getHighlightInnerColor());
        assertEquals("Deserialized values coinsides", resurrectedBorder .getHighlightOuterColor(), border2.getHighlightOuterColor());
    }

    // Constructor must accept any int bevel type without throwing.
    public void testBevelType() throws Exception {
        // Regression test for HARMONY-2590
        for (int i = -10; i < 10; i++) {
            new BevelBorder(i);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment.realtime.appenderator; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Function; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import org.apache.druid.client.CachingQueryRunner; import org.apache.druid.client.cache.Cache; import org.apache.druid.client.cache.CacheConfig; import org.apache.druid.client.cache.CachePopulatorStats; import org.apache.druid.client.cache.ForegroundCachePopulator; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.guava.CloseQuietly; import org.apache.druid.java.util.common.guava.FunctionalIterable; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import org.apache.druid.query.BySegmentQueryRunner; import org.apache.druid.query.CPUTimeMetricQueryRunner; import org.apache.druid.query.FinalizeResultsQueryRunner; import org.apache.druid.query.MetricsEmittingQueryRunner; import 
org.apache.druid.query.NoopQueryRunner; import org.apache.druid.query.Query; import org.apache.druid.query.QueryMetrics; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerFactory; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QueryRunnerHelper; import org.apache.druid.query.QuerySegmentWalker; import org.apache.druid.query.QueryToolChest; import org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.SinkQueryRunners; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.spec.SpecificSegmentQueryRunner; import org.apache.druid.query.spec.SpecificSegmentSpec; import org.apache.druid.segment.Segment; import org.apache.druid.segment.realtime.FireHydrant; import org.apache.druid.segment.realtime.plumber.Sink; import org.apache.druid.timeline.SegmentId; import org.apache.druid.timeline.TimelineObjectHolder; import org.apache.druid.timeline.VersionedIntervalTimeline; import org.apache.druid.timeline.partition.PartitionChunk; import org.apache.druid.timeline.partition.PartitionHolder; import org.joda.time.Interval; import java.io.Closeable; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicLong; public class SinkQuerySegmentWalker implements QuerySegmentWalker { private static final EmittingLogger log = new EmittingLogger(SinkQuerySegmentWalker.class); private static final String CONTEXT_SKIP_INCREMENTAL_SEGMENT = "skipIncrementalSegment"; private final String dataSource; private final VersionedIntervalTimeline<String, Sink> sinkTimeline; private final ObjectMapper objectMapper; private final ServiceEmitter emitter; private final QueryRunnerFactoryConglomerate conglomerate; private final ExecutorService queryExecutorService; private final Cache cache; private final CacheConfig cacheConfig; private final CachePopulatorStats cachePopulatorStats; public 
SinkQuerySegmentWalker( String dataSource, VersionedIntervalTimeline<String, Sink> sinkTimeline, ObjectMapper objectMapper, ServiceEmitter emitter, QueryRunnerFactoryConglomerate conglomerate, ExecutorService queryExecutorService, Cache cache, CacheConfig cacheConfig, CachePopulatorStats cachePopulatorStats ) { this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource"); this.sinkTimeline = Preconditions.checkNotNull(sinkTimeline, "sinkTimeline"); this.objectMapper = Preconditions.checkNotNull(objectMapper, "objectMapper"); this.emitter = Preconditions.checkNotNull(emitter, "emitter"); this.conglomerate = Preconditions.checkNotNull(conglomerate, "conglomerate"); this.queryExecutorService = Preconditions.checkNotNull(queryExecutorService, "queryExecutorService"); this.cache = Preconditions.checkNotNull(cache, "cache"); this.cacheConfig = Preconditions.checkNotNull(cacheConfig, "cacheConfig"); this.cachePopulatorStats = Preconditions.checkNotNull(cachePopulatorStats, "cachePopulatorStats"); if (!cache.isLocal()) { log.warn("Configured cache[%s] is not local, caching will not be enabled.", cache.getClass().getName()); } } @Override public <T> QueryRunner<T> getQueryRunnerForIntervals(final Query<T> query, final Iterable<Interval> intervals) { final Iterable<SegmentDescriptor> specs = FunctionalIterable .create(intervals) .transformCat( new Function<Interval, Iterable<TimelineObjectHolder<String, Sink>>>() { @Override public Iterable<TimelineObjectHolder<String, Sink>> apply(final Interval interval) { return sinkTimeline.lookup(interval); } } ) .transformCat( new Function<TimelineObjectHolder<String, Sink>, Iterable<SegmentDescriptor>>() { @Override public Iterable<SegmentDescriptor> apply(final TimelineObjectHolder<String, Sink> holder) { return FunctionalIterable .create(holder.getObject()) .transform( new Function<PartitionChunk<Sink>, SegmentDescriptor>() { @Override public SegmentDescriptor apply(final PartitionChunk<Sink> chunk) { return new 
SegmentDescriptor( holder.getInterval(), holder.getVersion(), chunk.getChunkNumber() ); } } ); } } ); return getQueryRunnerForSegments(query, specs); } @Override public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs) { // We only handle one particular dataSource. Make sure that's what we have, then ignore from here on out. if (!(query.getDataSource() instanceof TableDataSource) || !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) { log.makeAlert("Received query for unknown dataSource") .addData("dataSource", query.getDataSource()) .emit(); return new NoopQueryRunner<>(); } final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query); if (factory == null) { throw new ISE("Unknown query type[%s].", query.getClass()); } final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest(); final boolean skipIncrementalSegment = query.getContextValue(CONTEXT_SKIP_INCREMENTAL_SEGMENT, false); final AtomicLong cpuTimeAccumulator = new AtomicLong(0L); Iterable<QueryRunner<T>> perSegmentRunners = Iterables.transform( specs, descriptor -> { final PartitionHolder<Sink> holder = sinkTimeline.findEntry( descriptor.getInterval(), descriptor.getVersion() ); if (holder == null) { return new ReportTimelineMissingSegmentQueryRunner<>(descriptor); } final PartitionChunk<Sink> chunk = holder.getChunk(descriptor.getPartitionNumber()); if (chunk == null) { return new ReportTimelineMissingSegmentQueryRunner<>(descriptor); } final Sink theSink = chunk.getObject(); final SegmentId sinkSegmentId = theSink.getSegment().getId(); Iterable<QueryRunner<T>> perHydrantRunners = new SinkQueryRunners<>( Iterables.transform( theSink, hydrant -> { // Hydrant might swap at any point, but if it's swapped at the start // then we know it's *definitely* swapped. 
final boolean hydrantDefinitelySwapped = hydrant.hasSwapped(); if (skipIncrementalSegment && !hydrantDefinitelySwapped) { return new Pair<>(Intervals.ETERNITY, new NoopQueryRunner<>()); } // Prevent the underlying segment from swapping when its being iterated final Pair<Segment, Closeable> segmentAndCloseable = hydrant.getAndIncrementSegment(); try { QueryRunner<T> runner = factory.createRunner(segmentAndCloseable.lhs); // 1) Only use caching if data is immutable // 2) Hydrants are not the same between replicas, make sure cache is local if (hydrantDefinitelySwapped && cache.isLocal()) { runner = new CachingQueryRunner<>( makeHydrantCacheIdentifier(hydrant), descriptor, objectMapper, cache, toolChest, runner, // Always populate in foreground regardless of config new ForegroundCachePopulator( objectMapper, cachePopulatorStats, cacheConfig.getMaxEntrySize() ), cacheConfig ); } // Make it always use Closeable to decrement() runner = QueryRunnerHelper.makeClosingQueryRunner( runner, segmentAndCloseable.rhs ); return new Pair<>(segmentAndCloseable.lhs.getDataInterval(), runner); } catch (RuntimeException e) { CloseQuietly.close(segmentAndCloseable.rhs); throw e; } } ) ); return new SpecificSegmentQueryRunner<>( withPerSinkMetrics( new BySegmentQueryRunner<>( sinkSegmentId, descriptor.getInterval().getStart(), factory.mergeRunners( Execs.directExecutor(), perHydrantRunners ) ), toolChest, sinkSegmentId, cpuTimeAccumulator ), new SpecificSegmentSpec(descriptor) ); } ); final QueryRunner<T> mergedRunner = toolChest.mergeResults( factory.mergeRunners( queryExecutorService, perSegmentRunners ) ); return CPUTimeMetricQueryRunner.safeBuild( new FinalizeResultsQueryRunner<>(mergedRunner, toolChest), toolChest, emitter, cpuTimeAccumulator, true ); } /** * Decorates a Sink's query runner to emit query/segmentAndCache/time, query/segment/time, query/wait/time once * each for the whole Sink. Also adds CPU time to cpuTimeAccumulator. 
*/ private <T> QueryRunner<T> withPerSinkMetrics( final QueryRunner<T> sinkRunner, final QueryToolChest<T, ? extends Query<T>> queryToolChest, final SegmentId sinkSegmentId, final AtomicLong cpuTimeAccumulator ) { // Note: reportSegmentAndCacheTime and reportSegmentTime are effectively the same here. They don't split apart // cache vs. non-cache due to the fact that Sinks may be partially cached and partially uncached. Making this // better would need to involve another accumulator like the cpuTimeAccumulator that we could share with the // sinkRunner. String sinkSegmentIdString = sinkSegmentId.toString(); return CPUTimeMetricQueryRunner.safeBuild( new MetricsEmittingQueryRunner<>( emitter, queryToolChest, new MetricsEmittingQueryRunner<>( emitter, queryToolChest, sinkRunner, QueryMetrics::reportSegmentTime, queryMetrics -> queryMetrics.segment(sinkSegmentIdString) ), QueryMetrics::reportSegmentAndCacheTime, queryMetrics -> queryMetrics.segment(sinkSegmentIdString) ).withWaitMeasuredFromNow(), queryToolChest, emitter, cpuTimeAccumulator, false ); } public VersionedIntervalTimeline<String, Sink> getSinkTimeline() { return sinkTimeline; } public static String makeHydrantCacheIdentifier(FireHydrant input) { return input.getSegmentId() + "_" + input.getCount(); } }
package me.aatma.languagetologic.nl; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Predicate; import edu.stanford.nlp.ling.CoreLabel; import edu.stanford.nlp.trees.CollinsHeadFinder; import edu.stanford.nlp.trees.Tree; import me.aatma.languagetologic.SemanticCombination; import me.aatma.library.mapi.eventapi.constants.EventConstants; import me.aatma.library.mapi.nlvocabapi.Thing; import me.aatma.library.mapi.nlvocabapi.jenaImpl.ThingImpl; import me.aatma.library.sapi.BinaryPredicate; import me.aatma.library.sapi.Context; import me.aatma.library.sapi.SCollection; import me.aatma.library.sapi.SIndividual; import me.aatma.library.sapi.SObject; import me.aatma.library.sapi.SPredicate; import me.aatma.library.sapi.STerm; import me.aatma.library.sapi.config.SAPIConfiguration; import me.aatma.library.sapi.iterator.SObjectIterator; import me.aatma.library.sapi.jenasclient.BinaryPredicateImpl; import me.aatma.library.sapi.jenasclient.SCollectionImpl; import me.aatma.library.sapi.jenasclient.SIndividualImpl; import me.aatma.library.sapi.jenasclient.SObjectImpl; import me.aatma.library.sapi.jenasclient.SPredicateImpl; import me.aatma.library.stanfordcorenlputils.FilterInPOS; import me.aatma.library.stanfordcorenlputils.FilterInPOSPattern; import me.aatma.library.stanfordcorenlputils.NLPTools; /** * Created by vkantharaj on 6/9/16. 
 */
/**
 * Analyzes a noun-phrase (NP) parse subtree and produces candidate semantic
 * objects (collections, individuals, or predicates) for it.
 */
public class NounPhrase extends AnalyzePhraseImpl implements AnalyzePhrase {

    private static final Logger log = LoggerFactory.getLogger(NounPhrase.class.getCanonicalName());

    /**
     * @param t       the NP subtree to analyze (fields inherited from AnalyzePhraseImpl)
     * @param retType the semantic type requested by the caller (e.g. SPredicate.class)
     */
    public NounPhrase (Tree t, Class retType) {
        this.treeToAnalyze = t;
        this.retType = retType;
    }

    /**
     * Same as the two-arg constructor, additionally recording the sentence type
     * (FACT/QUERY/...) used later to resolve definite articles.
     */
    public NounPhrase (Tree t, Class retType, SemanticCombination.SentenceType sentenceType) {
        this.treeToAnalyze = t;
        this.retType = retType;
        this.sentenceType = sentenceType;
    }

    // handle adjective
    // handle proper noun
    // handle common noun
    // handle mass and count nouns
    // handle quantities
    @Override
    public List<Object> parse() {
        List<Object> semPossibilities = new ArrayList<>();
        Tree t = this.treeToAnalyze;
        Class type = this.retType;
        // Nothing to analyze: return an empty possibility list, never null.
        if (t == null) {
            return semPossibilities;
        }
        // Adjective phrases are delegated wholesale to AdjPhrase.
        if (t.value().equals("ADJP")) {
            AnalyzePhrase ap = new AdjPhrase(t, type);
            return ap.parse();
            // return parseAdjP(t, type);
        }
        // BuildNumbers bn = new BuildNumbers();
        // // TODO - Done: BuildNumbers should deal with SemanticMatch, because it knows how the nodes
        // // are setup. It should return the SemanticGraph or SemanticPossibilites (if its just a node)
        // // This returns MAPI objects, primarily QuantityImpl and its subclasses
        // // from UOMAPI
        // List<Object> tempSemPos = BuildNumbers.matches(t);
        // if (!tempSemPos.isEmpty()){
        // log.info("Found a quantity determiner: " + tempSemPos);
        // return tempSemPos;
        // }
        // Analyzing the lowest level NP, which doesn't have any other NP as its child
        // NOTE(review): det is initialized to "" (never null), so the
        // (det == null) checks further down in this method can never be true —
        // they presumably were meant to test det.isEmpty(); confirm intent.
        String det = "";
        if (t.firstChild() != null && t.firstChild().value().equals("DT")) {
            log.debug("DT found: " + t.firstChild().firstChild());
            det = t.firstChild().firstChild().value();
        }
        String numOrdinalAdj = ""; // Tracks ordinal adjectives ("first", "second", "third", "fourth") only — at least for now
        Tree head = t.headPreTerminal(new CollinsHeadFinder());
        log.info("Parsing NP: " + t.firstChild().value());
        log.debug("Head: " + head);
        int headPosWODet = t.objectIndexOf(head) - ((det == null) ?
0 : 1); int phraseLenWODet = t.numChildren() - ((det == null) ? 0 : 1); String headLemma = ((CoreLabel)head.firstChild().label()).lemma(); headLemma = headLemma.toLowerCase(); log.info("Head location: " + headPosWODet); log.info("Head Lemma: " + headLemma); log.info("Phrase length: " + phraseLenWODet); List<Tree> all_parts = new ArrayList<>(); List<Tree> all_nn_parts = new ArrayList<>(); List<Tree> nn_parts = new ArrayList<>(); List<Tree> nnp_parts = new ArrayList<>(); List<Tree> all_prp_parts = new ArrayList<>(); List<Tree> vbg_parts = new ArrayList<>(); //TODO: Things to handle here. But we don't have to go overboard. // For a while recommended procedure is to make NPs as simple as possible and // assert separate property based statements. Instead of "the old man ate blah", // say, the man ate blah, and the man is old. // 1. Orangish red: make into one word // 2. NN and NNP classification is unreliable, try various combinations when looking up // the KB // 3. Tall reddish brown tree trunk. Separate out the JJs, "tall", "reddish brown" // and build the assertions right here? Or make them sub-collections? // 4. 
Handle adjective phrase with adverbial component, "slightly old" Predicate<Tree> nn_all = new FilterInPOSPattern("NN.?"); // Doesn't cover NNPS Predicate<Tree> prp_all = new FilterInPOSPattern("PRP.?"); // Covers PRP and PRP$ (the possessive pronoun) Predicate<Tree> vbg = new FilterInPOS("VBG"); // Gerunds in Noun Phrases for (Tree c : t.children()) { log.debug("Child: " + c); if (!c.value().equals("DT") && !isLeafTreeANumber(c)) { all_parts.add(c.firstChild()); } if (c.value().equals("JJ")) { if (c.firstChild().value().equals("red")) {// is instance of color log.debug("Found Color: " + c.firstChild()); } else if (c.firstChild().value().equals("big")) { log.debug("Size in a context: " + c.firstChild()); } if (isLeafTreeANumber(c)) { numOrdinalAdj = c.firstChild().value(); } } else if (nn_all.test(c)) { all_nn_parts.add(c.firstChild()); if (c.value().equals("NN") || c.value().equals("NNS") // If its plural, should we add the lemma? ) { nn_parts.add(c.firstChild()); } else if (c.value().equals("NNP")) { nnp_parts.add(c.firstChild()); } } else if (prp_all.test(c)) { all_prp_parts.add(c.firstChild()); } else if (vbg.test(c)) { vbg_parts.add(c.firstChild()); } } log.trace("NNP Parts" + nnp_parts); log.trace("NN Parts" + nn_parts); log.trace("All NN.? Parts: " + all_nn_parts); log.trace("All PRP.? Parts: " + all_nn_parts); // all_nn_string is not used any more to search class/collection concepts // String all_nn_string = NLPTools.treeLeavesToString(all_nn_parts); // log.debug("all NN.? String: >" + all_nn_string + "<"); String all_parts_string = NLPTools.treeLeavesToString(all_parts); log.debug("all parts string: >" + all_parts_string + "<"); // @todo we might have to replace this infrastructure with tree regular expression // "His son is a soldier" parses to (NP (PRP$ His) (NN son)) : it has PRP$ and NN // It will go into all_nn_parts branch // @TODO: Consolidate this. Multiple checks for Predicate.class. 
// @TODO: The term creation should be in CleanAndCombineGraph, to track // the new terms created. if (!nn_parts.isEmpty() && !nnp_parts.isEmpty()) { log.info("We have mix of nn_parts: " + nn_parts + " and nnp_parts: " + nnp_parts); // We have a mix of NNP and NN/NNS // Just go by the determiner List<SObject> headNoun_object; if (det != null){ // make it a SCollection log.debug("We have a det: " + det + " so find an instance of collection."); headNoun_object = searchCollection.searchConcepts(all_parts_string); } else { // make it an individual log.debug("We do not have a det: " + det + " so find an instance of individual."); headNoun_object = searchIndividual.searchConcepts(all_parts_string); } // There seems to be a java bug. Can't add values to List<? extends Object> // so had to change headNoun_object to List<SObject> from List<? extends Object> // So for semPossibilities, use addAll. semPossibilities.addAll(headNoun_object); } else if (!nn_parts.isEmpty()){ log.info("We have only nn_parts: " + nn_parts); List<SObject> headNoun_object; if (type != null && type.equals(SPredicate.class)) { log.debug("Find a predicate for nn_parts"); headNoun_object = searchPredicate.searchConcepts(all_parts_string); if (headNoun_object.isEmpty()) { SPredicate p = null; // This case was added because, we wanted to find predicates that didn't have lexification, // but had a direct syntactic manipulation of the phrase to constant name, in the KB // But if we presume that all predicates will have lexification, and the search includes // the local name, this extra search is unnecessary String potentialPredName = stringToConstantName(all_parts_string, SPredicateImpl.class); List<SObject> predsUsingLocalName = searchPredicate.searchConcepts(potentialPredName); // if (!BinaryPredicateImpl.existsAsType(potentialPredName)) { if (predsUsingLocalName.isEmpty()) { BinaryPredicate pTemp = BinaryPredicateImpl.findOrCreate( searchCtxNs + deriveRandomizedName (all_parts_string, 
SPredicateImpl.class), searchCtx); Map<String, Object> m = new HashMap<>(); m.put("newTerm", true); p = new BinaryPredicateImpl(pTemp, m); headNoun_object.add(p); } else { // p = BinaryPredicateImpl.get(searchCtxNs + potentialPredName, searchCtx); p = (BinaryPredicate)predsUsingLocalName.get(0); // just pick the first one // TODO: Fix the above, first it assumes only BinaryPredicate // two it picks the first one headNoun_object.add(p); } // addNL(t, det, p, all_parts, new SpeechPartImpl("CountNoun")); addNL(t, det, p, all_parts); } } else { log.debug("Find a collection for nn_parts"); headNoun_object = searchCollection.searchConcepts(all_parts_string); if (headNoun_object.isEmpty()){ SCollection c = SCollectionImpl.findOrCreate( searchCtxNs + deriveRandomizedName (all_parts_string, null), searchCtx); // c.addArg2(BinaryPredicateImpl.get("nameString"), all_parts_string, ContextImpl.get("GeneralEnglishMt")); ((SObjectImpl)c).getSboData().put("nlDet", det); if (numOrdinalAdj != null) { ((SObjectImpl)c).getSboData().put("numOrdinalAdj", numOrdinalAdj); log.trace("Adding numOrdinalAdj: " + numOrdinalAdj + " to collection: " + c); } ((SObjectImpl)c).getSboData().put("newTerm", true); // addNL(t, det, c, all_parts, new SpeechPartImpl("CountNoun")); addNL(t, det, c, all_parts); headNoun_object.add(c); } else { for (Object o : headNoun_object) { ((SObjectImpl)o).getSboData().put("nlDet", det); if (numOrdinalAdj != null) { ((SObjectImpl)o).getSboData().put("numOrdinalAdj", numOrdinalAdj); log.trace("Adding numOrdinalAdj: " + numOrdinalAdj + " to collection: " + o); } } if (this.sentenceType != null && (this.sentenceType.equals(SemanticCombination.SentenceType.FACT) || this.sentenceType.equals(SemanticCombination.SentenceType.QUERY))) { log.debug("We have a FACT or a QUERY, try to resolve definite article in the context."); headNoun_object = this.findIndividualForDefiniteArticle(headNoun_object, searchCtx); } } } semPossibilities.addAll(headNoun_object); } else if 
(!nnp_parts.isEmpty()) { log.info("We have only nnp_parts: " + nnp_parts + ". So find an individual."); // Use all of the string if even a small part is NNP String nnp_string = NLPTools.treeLeavesToString(nnp_parts); log.debug("NNP String: >" + nnp_string + "<"); semPossibilities.addAll(searchIndividual.searchConcepts(all_parts_string)); //nnp_string); if (semPossibilities.isEmpty()){ // KBIndividual i = KBIndividualImpl.findOrCreate(deriveRandomizedName (nnp_string)); // i.addArg2(BinaryPredicateImpl.get("nameString"), nnp_string, ContextImpl.get("GeneralEnglishMt")); SIndividual i = SIndividualImpl.findOrCreate( searchCtxNs + deriveRandomizedName (all_parts_string, null), searchCtx); //i.addArg2(BinaryPredicateImpl.get("nameString"), all_parts_string, ContextImpl.get("GeneralEnglishMt")); Thing indThing = new ThingImpl(i); // indThing.setNameString(ContextImpl.get("GeneralEnglishMt"), all_parts_string); indThing.sentenceCanonicalPhraseOfConcept(all_parts_string).assertIn(searchCtx); ((SObjectImpl)i).getSboData().put("newTerm", true); semPossibilities.add(i); } } else if (!all_prp_parts.isEmpty()) { log.info("We have prp_parts: " + all_prp_parts); // This will only handle "I", "me", "myself", // 1. not "my head" - this would require handling prp and nn. // Also, such relations should be explicitly specified anyways: // Intantiate, "A head" and say "The head is part of me". // 2. not "he", "it". 
This would require contextual disambiguation, than just assuming String all_prp_string = NLPTools.treeLeavesToString(all_prp_parts); log.debug("PRP String: >" + all_prp_string + "<"); if (all_prp_string.equalsIgnoreCase("I") || all_prp_string.equalsIgnoreCase("me") || all_prp_string.equalsIgnoreCase("myself") ) { if (SAPIConfiguration.getCurrentUser() != null){ semPossibilities.add(SAPIConfiguration.getCurrentUser()); } else { log.error("SAPIConfiguration.getCurrentUser() returned null"); } } else { log.error("No head noun based collection found."); // throw new Exception("No head noun based collection found."); } } else if (!vbg_parts.isEmpty()) { String vbg_str = NLPTools.treeLeavesToString(vbg_parts); List<SObject> parseObjs = searchEventSpecs.searchConcepts(vbg_str); for (SObject o : parseObjs){ if (o instanceof SCollection){ semPossibilities.add(SCollection.class.cast(o)); } } log.info("Found events for Gerund (vbg): " + semPossibilities); if (semPossibilities.isEmpty()) { log.info("Creating new vocabulary!!"); // SCollection cTemp = FirstOrderCollectionImpl.findOrCreate(deriveRandomizedName(all_vp_inc_phrases, null)); SCollection cTemp = SCollectionImpl.findOrCreate( searchCtxNs + deriveRandomizedName(vbg_str, null), searchCtx); Map<String, Object> m = new HashMap<>(); m.put("newTerm", true); SCollection c = new SCollectionImpl(cTemp, m); c.addGeneralization(EventConstants.event, searchCtx); semPossibilities.add(c); // addNLVP(t.headPreTerminal(new CollinsHeadFinder()), c, all_vp_parts, new SpeechPartImpl("Verb")); VerbPhrase vp = new VerbPhrase(t, false); // This for now can be anything, since it is not used // addNLVP was static, so it got all the params it needed, the argument signature was not changed, // when it was made non-static vp.addNLVP(t.headPreTerminal(new CollinsHeadFinder()), c, vbg_parts); } } // Get an instance of NP head if (det != null) { if (det.equals("a") || det.equals("this") || det.equals("that") || det.equals("an")) { //create an 
individual /* // type needs to be decided at the persist stage. if (type.equals(KBIndividual.class)) { String randomize = RandomStringUtils.randomAlphanumeric(10); } else if (type.equals(SCollection.class)) { } */ } else if (det.equals("the")) { // This is different. We have to refer to the context before, to find the indeterminate // object referred before this. So we can't create a new individual. } else if (det.equals("all") || det.equals("some") || det.equals("any")) { } } log.info("Found KB Collection, Individual or Predicate for noun: " + semPossibilities); return semPossibilities; } boolean isLeafTreeANumber (Tree t) { if (t.isLeaf()) { return false; } String num = t.firstChild().value(); if (num.equalsIgnoreCase("first") || num.equalsIgnoreCase("second") || num.equalsIgnoreCase("third") || num.equalsIgnoreCase("fourth")) { return true; } else { return false; } } // TODO: Possibly move to Determiners class. List<SObject> findIndividualForDefiniteArticle (List<SObject> possibilities, Context ctx) { log.debug("Finding individual for collections with 'the' determiner"); List<SObject> modifiedPossibilities = new ArrayList<>(); for (SObject o : possibilities) { if (o instanceof SCollection) { SCollection col = (SCollection) o; if (((SObjectImpl)col).getSboData().containsKey("nlDet") && ((String)((SObjectImpl)col).getSboData().get("nlDet")).equalsIgnoreCase("the")) { // TODO: This will get instances only in the current context, not from the imported contexts // make getInstances more general.. 
with options SObjectIterator<SIndividual> insts = col.getInstances(ctx, SIndividual.class); boolean foundInst = false; while (insts.hasNext()) { foundInst = true; SIndividual ind = insts.next(); log.debug("Found individual: {} as instance of collection: {}, in context {}", ind, col, ctx); ((SObjectImpl)ind).setTypeCore(col); modifiedPossibilities.add(ind); } if (!foundInst) { modifiedPossibilities.add(o); } } else { modifiedPossibilities.add(o); } } else { modifiedPossibilities.add(o); } } return modifiedPossibilities; } }
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.beliefs.bayes.model; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import com.thoughtworks.xstream.XStream; import com.thoughtworks.xstream.io.xml.DomDriver; import org.drools.beliefs.bayes.BayesNetwork; import org.drools.beliefs.bayes.BayesVariable; import org.drools.beliefs.bayes.assembler.BayesNetworkAssemblerError; import org.drools.beliefs.graph.GraphNode; import org.drools.beliefs.graph.impl.EdgeImpl; import org.drools.compiler.compiler.ParserError; import org.drools.core.io.internal.InternalResource; import org.kie.api.io.Resource; import org.kie.internal.builder.KnowledgeBuilderError; import static org.kie.soup.commons.xstream.XStreamUtils.createTrustingXStream; public class XmlBifParser { public static Bif loadBif(Resource resource, ArrayList<KnowledgeBuilderError> errors) { InputStream is = null; try { is = resource.getInputStream(); } catch (IOException e) { errors.add( new ParserError(resource, "Exception opening Stream:\n" + e.toString(), 0, 0) ); return null; } try { String encoding = resource instanceof InternalResource ? ((InternalResource) resource).getEncoding() : null; XStream xstream = encoding != null ? 
createTrustingXStream(new DomDriver(encoding)) : createTrustingXStream(); initXStream(xstream); Bif bif = (Bif) xstream.fromXML(is); return bif; } catch (Exception e) { errors.add( new BayesNetworkAssemblerError(resource, "Unable to parse opening Stream:\n" + e.toString()) ); return null; } } public static Bif loadBif(URL url) { XStream xstream = createTrustingXStream(); initXStream( xstream ); Bif bif = (Bif) xstream.fromXML(url); return bif; } private static void initXStream(XStream xstream) { xstream.processAnnotations(Bif.class); xstream.processAnnotations(Network.class); xstream.processAnnotations(Probability.class); xstream.processAnnotations(Definition.class); } public static BayesNetwork buildBayesNetwork(Bif bif) { String name = bif.getNetwork().getName(); String packageName = "default"; List<String> props = bif.getNetwork().getProperties(); if (props != null ) { for ( String prop : props ) { prop = prop.trim(); if (prop.startsWith("package") ) { packageName = prop.substring( prop.indexOf('=') + 1).trim(); } } } BayesNetwork graph = new BayesNetwork(name, packageName); Map<String, GraphNode<BayesVariable>> map = new HashMap<String, GraphNode<BayesVariable>>(); for (Definition def : bif.getNetwork().getDefinitions()) { GraphNode<BayesVariable> node = graph.addNode(); BayesVariable var = buildVariable(def, bif.getNetwork(), node.getId()); node.setContent( var ); map.put( var.getName(), node ); } for(Entry<String, GraphNode<BayesVariable>> entry : map.entrySet()) { GraphNode<BayesVariable> node = entry.getValue(); BayesVariable var = node.getContent(); if ( var.getGiven() != null && var.getGiven().length > 0 ) { for ( String given : var.getGiven() ) { GraphNode<BayesVariable> givenNode = map.get( given ); EdgeImpl e = new EdgeImpl(); e.setOutGraphNode(givenNode); e.setInGraphNode(node); } } } return graph; } private static BayesVariable buildVariable(Definition def, Network network, int id) { List<String> outcomes = new ArrayList(); 
getOutcomesByVariable(network, def.getName(), outcomes); List<String> given = (def.getGiven() == null) ? Collections.<String>emptyList() : def.getGiven(); return new BayesVariable<String>(def.getName(), id, outcomes.toArray( new String[ outcomes.size()] ), getProbabilities(def.getProbabilities(), outcomes), given.toArray(new String[given.size()]) ); } private static void getOutcomesByVariable(Network network, String nameDefinition, List<String> outcomes) { for (Variable var : network.getVariables()) { if (var.getName().equals(nameDefinition)) { for (String outcome : var.getOutComes()) { outcomes.add(outcome); } } } } private static double[][] getProbabilities(String table,List<String> outcomes) { table = table.trim(); String[] values = table.split(" "); double probabilities[][] = new double[values.length/2][outcomes.size()]; int k = 0; for(int i = 0, length = values.length/2; i < length; i++){ for(int j = 0; j < outcomes.size(); j++){ probabilities[i][j] = Double.valueOf(values[k++]); } } return probabilities; } private static double[][] getPosition(String stringPosition, double[][] position) { if (stringPosition != null) { stringPosition = clearStringPostion(stringPosition); int i = 0; int j = 0; for (String pos : stringPosition.split(",")) { position[i][j] = Double.parseDouble(pos); if (i < j) { i += 1; } j += 1; } } return null; } private static String clearStringPostion(String stringPosition){ stringPosition = stringPosition.replace("position", ""); stringPosition = stringPosition.replace("=", ""); stringPosition = stringPosition.replace("(", ""); stringPosition = stringPosition.replace(")", ""); stringPosition = stringPosition.trim(); return stringPosition; } // private void setIncomingNodes(BayesNetwork bayesNetwork){ // for(BayesVariable node : bayesNetwork.getNodos()){ // if(node.getGiven()!=null && !node.getGiven().isEmpty()){ // node.setIncomingNodes(this.getNodesByGiven(node.getGiven(), bayesNetwork.getNodos())); // } // } // } // // private 
List<BayesVariable> getNodesByGiven(List<String> given, List<BayesVariable> nodes){ // List<BayesVariable> listIncoming = new ArrayList(); // for(String giv : given){ // for(BayesVariable node : nodes){ // if(node.getName().equals(giv)){ // listIncoming.add(node); // break; // } // } // } // return listIncoming; // } }
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.azure;

import com.microsoft.windowsazure.management.compute.models.DeploymentSlot;
import com.microsoft.windowsazure.management.compute.models.DeploymentStatus;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpsConfigurator;
import com.sun.net.httpserver.HttpsServer;
import org.elasticsearch.cloud.azure.management.AzureComputeService;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.discovery.DiscoveryModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugin.discovery.azure.AzureDiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.TransportSettings;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import javax.xml.XMLConstants;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.security.KeyStore;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;

/**
 * Cluster-formation test for Azure unicast discovery. A local HTTPS server
 * impersonates the Azure management API endpoint
 * ({@code /subscription/services/hostedservices/myservice}) and answers with an
 * XML deployment description listing the transport addresses that the started
 * nodes wrote into their per-node log directories, so nodes discover each
 * other through the mocked Azure response.
 */
@ESIntegTestCase.SuppressLocalMode
@ESIntegTestCase.ClusterScope(numDataNodes = 2, numClientNodes = 0)
@SuppressForbidden(reason = "use http server")
// TODO this should be a IT but currently all ITs in this project run against a real cluster
public class AzureDiscoveryClusterFormationTests extends ESIntegTestCase {

    /**
     * Test-only plugin whose sole job is to register the Azure management
     * endpoint setting so it can be pointed at the local mock server.
     */
    public static class TestPlugin extends Plugin {
        @Override
        public String name() {
            return AzureDiscoveryClusterFormationTests.class.getName();
        }

        @Override
        public String description() {
            return AzureDiscoveryClusterFormationTests.class.getName();
        }

        public void onModule(SettingsModule settingsModule) {
            settingsModule.registerSetting(AzureComputeService.Management.ENDPOINT_SETTING);
        }
    }

    // Mock Azure management endpoint; created in startHttpd, torn down in stopHttpd.
    private static HttpsServer httpsServer;
    // Root directory holding one subdirectory per node; each node writes its
    // bound transport ports there (WRITE_PORTS_FIELD_SETTING below).
    private static Path logDir;

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return pluginList(AzureDiscoveryPlugin.class, TestPlugin.class);
    }

    // Keystore copied out of test resources so nodes can reference it by path.
    private static Path keyStoreFile;

    /** Copies the bundled test keystore into a temp dir before any node starts. */
    @BeforeClass
    public static void setupKeyStore() throws IOException {
        Path tempDir = createTempDir();
        keyStoreFile = tempDir.resolve("test-node.jks");
        try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) {
            assertNotNull("can't find keystore file", stream);
            Files.copy(stream, keyStoreFile);
        }
    }

    /**
     * Per-node settings: azure discovery type, a dedicated log directory per
     * node ordinal, port writing enabled (so the mock server can read the bound
     * transport ports), and Azure management/discovery settings pointing at the
     * local mock HTTPS endpoint and test keystore.
     */
    @Override
    protected Settings nodeSettings(int nodeOrdinal) {
        Path resolve = logDir.resolve(Integer.toString(nodeOrdinal));
        try {
            Files.createDirectory(resolve);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return Settings.builder().put(super.nodeSettings(nodeOrdinal))
            .put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), AzureDiscoveryPlugin.AZURE)
            .put(Environment.PATH_LOGS_SETTING.getKey(), resolve)
            .put(TransportSettings.PORT.getKey(), 0) // ephemeral port; actual port is written to transport.ports
            .put(Node.WRITE_PORTS_FIELD_SETTING.getKey(), "true")
            .put(AzureComputeService.Management.ENDPOINT_SETTING.getKey(), "https://" + InetAddress.getLoopbackAddress().getHostAddress() +
                ":" + httpsServer.getAddress().getPort())
            .put(Environment.PATH_CONF_SETTING.getKey(), keyStoreFile.getParent().toAbsolutePath())
            .put(AzureComputeService.Management.KEYSTORE_PATH_SETTING.getKey(), keyStoreFile.toAbsolutePath())
            .put(AzureComputeService.Discovery.HOST_TYPE_SETTING.getKey(), AzureUnicastHostsProvider.HostType.PUBLIC_IP.name())
            .put(AzureComputeService.Management.KEYSTORE_PASSWORD_SETTING.getKey(), "keypass")
            .put(AzureComputeService.Management.KEYSTORE_TYPE_SETTING.getKey(), "jks")
            .put(AzureComputeService.Management.SERVICE_NAME_SETTING.getKey(), "myservice")
            .put(AzureComputeService.Management.SUBSCRIPTION_ID_SETTING.getKey(), "subscription")
            .put(AzureComputeService.Discovery.DEPLOYMENT_NAME_SETTING.getKey(), "mydeployment")
            .put(AzureComputeService.Discovery.ENDPOINT_NAME_SETTING.getKey(), "myendpoint")
            .put(AzureComputeService.Discovery.DEPLOYMENT_SLOT_SETTING.getKey(), AzureUnicastHostsProvider.Deployment.PRODUCTION.name())
            .build();
    }

    /**
     * Starts the mock Azure management HTTPS endpoint. For each request to the
     * hosted-service URL it scans the per-node log directories for
     * {@code transport.ports} files and answers with a windowsazure-namespace
     * XML deployment listing one role instance per discovered node (picking a
     * random address from each node's file).
     *
     * NOTE(review): the previous javadoc said "mock EC2 endpoint ...
     * DescribeInstances" — that was copy-pasted from the AWS test; this handler
     * serves the Azure hosted-services API.
     */
    @BeforeClass
    public static void startHttpd() throws Exception {
        logDir = createTempDir();
        SSLContext sslContext = getSSLContext();
        httpsServer = HttpsServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0);
        httpsServer.setHttpsConfigurator(new HttpsConfigurator(sslContext));
        httpsServer.createContext("/subscription/services/hostedservices/myservice", (s) -> {
            Headers headers = s.getResponseHeaders();
            headers.add("Content-Type", "text/xml; charset=UTF-8");
            XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory();
            xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true);
            StringWriter out = new StringWriter();
            XMLStreamWriter sw;
            try {
                sw = xmlOutputFactory.createXMLStreamWriter(out);
                sw.writeStartDocument();

                String namespace = "http://schemas.microsoft.com/windowsazure";
                sw.setDefaultNamespace(namespace);
                sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "HostedService", namespace);
                {
                    sw.writeStartElement("Deployments");
                    {
                        // One Deployment element per node that has published its transport ports.
                        Path[] files = FileSystemUtils.files(logDir);
                        for (int i = 0; i < files.length; i++) {
                            Path resolve = files[i].resolve("transport.ports");
                            if (Files.exists(resolve)) {
                                List<String> addresses = Files.readAllLines(resolve);
                                Collections.shuffle(addresses, random());
                                String address = addresses.get(0);
                                // split "host:port" at the LAST colon (host may contain colons)
                                int indexOfLastColon = address.lastIndexOf(':');
                                String host = address.substring(0, indexOfLastColon);
                                String port = address.substring(indexOfLastColon + 1);

                                sw.writeStartElement("Deployment");
                                {
                                    sw.writeStartElement("Name");
                                    sw.writeCharacters("mydeployment");
                                    sw.writeEndElement();

                                    sw.writeStartElement("DeploymentSlot");
                                    sw.writeCharacters(DeploymentSlot.Production.name());
                                    sw.writeEndElement();

                                    sw.writeStartElement("Status");
                                    sw.writeCharacters(DeploymentStatus.Running.name());
                                    sw.writeEndElement();

                                    sw.writeStartElement("RoleInstanceList");
                                    {
                                        sw.writeStartElement("RoleInstance");
                                        {
                                            sw.writeStartElement("RoleName");
                                            sw.writeCharacters(UUID.randomUUID().toString());
                                            sw.writeEndElement();

                                            sw.writeStartElement("IpAddress");
                                            sw.writeCharacters(host);
                                            sw.writeEndElement();

                                            sw.writeStartElement("InstanceEndpoints");
                                            {
                                                sw.writeStartElement("InstanceEndpoint");
                                                {
                                                    sw.writeStartElement("Name");
                                                    sw.writeCharacters("myendpoint");
                                                    sw.writeEndElement();

                                                    sw.writeStartElement("Vip");
                                                    sw.writeCharacters(host);
                                                    sw.writeEndElement();

                                                    sw.writeStartElement("PublicPort");
                                                    sw.writeCharacters(port);
                                                    sw.writeEndElement();
                                                }
                                                sw.writeEndElement();
                                            }
                                            sw.writeEndElement();
                                        }
                                        sw.writeEndElement();
                                    }
                                    sw.writeEndElement();
                                }
                                sw.writeEndElement();
                            }
                        }
                    }
                    sw.writeEndElement();
                }
                sw.writeEndElement();

                sw.writeEndDocument();
                sw.flush();

                final byte[] responseAsBytes = out.toString().getBytes(StandardCharsets.UTF_8);
                s.sendResponseHeaders(200, responseAsBytes.length);
                OutputStream responseBody = s.getResponseBody();
                responseBody.write(responseAsBytes);
                responseBody.close();
            } catch (XMLStreamException e) {
                Loggers.getLogger(AzureDiscoveryClusterFormationTests.class).error("Failed serializing XML", e);
                throw new RuntimeException(e);
            }
        });

        httpsServer.start();
    }

    /**
     * Builds an SSLContext from the bundled test keystore; the same store is
     * used as both key material (server identity) and trust material (so
     * clients accept the self-signed certificate).
     */
    private static SSLContext getSSLContext() throws Exception {
        char[] passphrase = "keypass".toCharArray();
        KeyStore ks = KeyStore.getInstance("JKS");
        try (InputStream stream = AzureDiscoveryClusterFormationTests.class.getResourceAsStream("/test-node.jks")) {
            assertNotNull("can't find keystore file", stream);
            ks.load(stream, passphrase);
        }
        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
        kmf.init(ks, passphrase);
        TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509");
        tmf.init(ks);
        SSLContext ssl = SSLContext.getInstance("TLS");
        ssl.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
        return ssl;
    }

    /** Stops all nodes first, then the mock server, so no node keeps polling it. */
    @AfterClass
    public static void stopHttpd() throws IOException {
        for (int i = 0; i < internalCluster().size(); i++) {
            // shut them all down otherwise we get spammed with connection refused exceptions
            internalCluster().stopRandomDataNode();
        }
        httpsServer.stop(0);
        httpsServer = null;
        logDir = null;
    }

    /** Waits for the initial two nodes to form a cluster, then adds a third and waits again. */
    public void testJoin() throws ExecutionException, InterruptedException {
        // only wait for the cluster to form
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get());
        // add one more node and wait for it to join
        internalCluster().startDataOnlyNodeAsync().get();
        assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(3)).get());
    }
}
/* * Copyright 2014 NAVER Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.navercorp.pinpoint.rpc.server; import com.navercorp.pinpoint.common.util.Assert; import com.navercorp.pinpoint.common.util.StringUtils; import com.navercorp.pinpoint.rpc.ChannelWriteFailListenableFuture; import com.navercorp.pinpoint.rpc.Future; import com.navercorp.pinpoint.rpc.ResponseMessage; import com.navercorp.pinpoint.rpc.client.RequestManager; import com.navercorp.pinpoint.rpc.client.WriteFailFutureListener; import com.navercorp.pinpoint.rpc.cluster.ClusterOption; import com.navercorp.pinpoint.rpc.cluster.Role; import com.navercorp.pinpoint.rpc.common.CyclicStateChecker; import com.navercorp.pinpoint.rpc.common.SocketStateChangeResult; import com.navercorp.pinpoint.rpc.common.SocketStateCode; import com.navercorp.pinpoint.rpc.control.ProtocolException; import com.navercorp.pinpoint.rpc.packet.ControlHandshakePacket; import com.navercorp.pinpoint.rpc.packet.ControlHandshakeResponsePacket; import com.navercorp.pinpoint.rpc.packet.HandshakeResponseCode; import com.navercorp.pinpoint.rpc.packet.Packet; import com.navercorp.pinpoint.rpc.packet.PacketType; import com.navercorp.pinpoint.rpc.packet.PingPacket; import com.navercorp.pinpoint.rpc.packet.PingPayloadPacket; import com.navercorp.pinpoint.rpc.packet.PongPacket; import com.navercorp.pinpoint.rpc.packet.RequestPacket; import com.navercorp.pinpoint.rpc.packet.ResponsePacket; import com.navercorp.pinpoint.rpc.packet.SendPacket; 
import com.navercorp.pinpoint.rpc.packet.ServerClosePacket;
import com.navercorp.pinpoint.rpc.packet.stream.StreamPacket;
import com.navercorp.pinpoint.rpc.server.handler.DoNothingChannelStateEventHandler;
import com.navercorp.pinpoint.rpc.server.handler.ServerStateChangeEventHandler;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannel;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelContext;
import com.navercorp.pinpoint.rpc.stream.ClientStreamChannelMessageListener;
import com.navercorp.pinpoint.rpc.stream.StreamChannelContext;
import com.navercorp.pinpoint.rpc.stream.StreamChannelManager;
import com.navercorp.pinpoint.rpc.stream.StreamChannelStateChangeEventHandler;
import com.navercorp.pinpoint.rpc.util.ClassUtils;
import com.navercorp.pinpoint.rpc.util.ControlMessageEncodingUtils;
import com.navercorp.pinpoint.rpc.util.IDGenerator;
import com.navercorp.pinpoint.rpc.util.ListUtils;
import com.navercorp.pinpoint.rpc.util.MapUtils;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.SocketAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;

/**
 * Server side of one Pinpoint RPC connection, bound to a single Netty {@link Channel}.
 * Tracks the session's lifecycle through {@link DefaultPinpointServerState} (connected,
 * run-simplex/duplex, closing, closed, error states), dispatches inbound packets by type,
 * and answers the client handshake and ping/pong health checks.
 *
 * @author Taejin Koo
 */
public class DefaultPinpointServer implements PinpointServer {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    // Wall-clock time at which this server-side session object was created.
    private final long startTimestamp = System.currentTimeMillis();

    private final Channel channel;
    private final RequestManager requestManager;

    // Socket-lifecycle state machine; all transitions notify stateChangeEventListeners.
    private final DefaultPinpointServerState state;
    // Counts consecutive ping/state mismatches (window of 5, see constructor);
    // the session is torn down once the window is fully marked.
    private final CyclicStateChecker stateChecker;

    private HealthCheckStateContext healthCheckStateContext = new HealthCheckStateContext();

    private final ServerMessageListener messageListener;

    private final List<ServerStateChangeEventHandler> stateChangeEventListeners;

    private final StreamChannelManager streamChannelManager;

    // Handshake properties sent by the client; set exactly once via compareAndSet(null, ...)
    // in setChannelProperties(), so the first successful handshake wins.
    private final AtomicReference<Map<Object, Object>> properties = new AtomicReference<Map<Object, Object>>();

    // "SimpleClassName@hashCode" tag used to correlate all log lines of this session.
    private final String objectUniqName;

    private final ClusterOption localClusterOption;
    // Populated from the handshake payload only for DUPLEX sessions; null until then.
    private ClusterOption remoteClusterOption;

    private final ChannelFutureListener serverCloseWriteListener;
    private final ChannelFutureListener responseWriteFailListener;

    private final WriteFailFutureListener pongWriteFutureListener = new WriteFailFutureListener(logger, "pong write fail.", "pong write success.");

    public DefaultPinpointServer(Channel channel, PinpointServerConfig serverConfig) {
        this(channel, serverConfig, null);
    }

    /**
     * @param channel                   connected Netty channel this session wraps
     * @param serverConfig              supplies listeners, timers, timeouts and cluster options
     * @param stateChangeEventListeners extra per-instance handlers, appended after the
     *                                  configured ones; nulls are filtered out
     */
    public DefaultPinpointServer(Channel channel, PinpointServerConfig serverConfig, ServerStateChangeEventHandler... stateChangeEventListeners) {
        this.channel = channel;

        this.messageListener = serverConfig.getMessageListener();

        // Server side allocates even stream ids (client uses odd) to avoid collisions.
        StreamChannelManager streamChannelManager = new StreamChannelManager(channel, IDGenerator.createEvenIdGenerator(), serverConfig.getStreamMessageListener());
        this.streamChannelManager = streamChannelManager;

        this.stateChangeEventListeners = new ArrayList<ServerStateChangeEventHandler>();
        List<ServerStateChangeEventHandler> configuredStateChangeEventHandlers = serverConfig.getStateChangeEventHandlers();
        if (configuredStateChangeEventHandlers != null) {
            for (ServerStateChangeEventHandler configuredStateChangeEventHandler : configuredStateChangeEventHandlers) {
                ListUtils.addIfValueNotNull(this.stateChangeEventListeners, configuredStateChangeEventHandler);
            }
        }
        ListUtils.addAllExceptNullValue(this.stateChangeEventListeners, stateChangeEventListeners);
        if (this.stateChangeEventListeners.isEmpty()) {
            // Keep the listener list non-empty so state transitions never need a null check.
            this.stateChangeEventListeners.add(DoNothingChannelStateEventHandler.INSTANCE);
        }

        RequestManager requestManager = new RequestManager(serverConfig.getRequestManagerTimer(), serverConfig.getDefaultRequestTimeout());
        this.requestManager = requestManager;

        this.objectUniqName = ClassUtils.simpleClassNameAndHashCodeString(this);

        this.serverCloseWriteListener = new WriteFailFutureListener(logger, objectUniqName + " sendClosePacket() write fail.", "serverClosePacket write success");
        this.responseWriteFailListener = new WriteFailFutureListener(logger, objectUniqName + " response() write fail.");

        this.state = new DefaultPinpointServerState(this, this.stateChangeEventListeners);
        this.stateChecker = new CyclicStateChecker(5);

        this.localClusterOption = serverConfig.getClusterOption();
    }

    /** Moves the session to CONNECTED and then RUN_WITHOUT_HANDSHAKE. */
    public void start() {
        logger.info("{} start() started. channel:{}.", objectUniqName, channel);

        state.toConnected();
        state.toRunWithoutHandshake();

        logger.info("{} start() completed.", objectUniqName);
    }

    /** Stops the session, treating the closure as peer/unexpected (serverStop == false). */
    public void stop() {
        logger.info("{} stop() started. channel:{}.", objectUniqName, channel);

        stop(false);

        logger.info("{} stop() completed.", objectUniqName);
    }

    /**
     * Transitions to the terminal state that matches how the close was initiated,
     * closes the channel if still connected, and always closes all stream channels.
     *
     * @param serverStop true when the server itself is shutting down (maps a running
     *                   session to UNEXPECTED_CLOSED rather than ..._BY_PEER)
     */
    public void stop(boolean serverStop) {
        try {
            SocketStateCode currentStateCode = getCurrentStateCode();
            if (SocketStateCode.BEING_CLOSE_BY_SERVER == currentStateCode) {
                state.toClosed();
            } else if (SocketStateCode.BEING_CLOSE_BY_CLIENT == currentStateCode) {
                state.toClosedByPeer();
            } else if (SocketStateCode.isRun(currentStateCode) && serverStop) {
                state.toUnexpectedClosed();
            } else if (SocketStateCode.isRun(currentStateCode)) {
                state.toUnexpectedClosedByPeer();
            } else if (SocketStateCode.isClosed(currentStateCode)) {
                logger.warn("{} stop(). Socket has closed state({}).", objectUniqName, currentStateCode);
            } else {
                state.toErrorUnknown();
                logger.warn("{} stop(). Socket has unexpected state.", objectUniqName, currentStateCode);
            }

            if (this.channel.isConnected()) {
                channel.close();
            }
        } finally {
            // Stream channels must be released even if the state transition above throws.
            streamChannelManager.close();
        }
    }

    /**
     * Fire-and-forget send to the client. Requires a duplex session.
     *
     * @throws IllegalStateException if the session is not in duplex-run state
     */
    @Override
    public void send(byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableDuplexCommunication()) {
            throw new IllegalStateException("Send fail. Error: Illegal State. pinpointServer:" + toString());
        }

        SendPacket send = new SendPacket(payload);
        write0(send);
    }

    /**
     * Request/response round trip to the client. Requires a duplex session.
     *
     * @return future completed when the matching ResponsePacket arrives (or on
     *         timeout/write failure, handled by RequestManager)
     * @throws IllegalStateException if the session is not in duplex-run state
     */
    @Override
    public Future<ResponseMessage> request(byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableDuplexCommunication()) {
            throw new IllegalStateException("Request fail. Error: Illegal State. pinpointServer:" + toString());
        }

        RequestPacket requestPacket = new RequestPacket(payload);
        // Register before writing so the response cannot race the registration.
        ChannelWriteFailListenableFuture<ResponseMessage> messageFuture = this.requestManager.register(requestPacket);
        write0(requestPacket, messageFuture);
        return messageFuture;
    }

    @Override
    public void response(RequestPacket requestPacket, byte[] payload) {
        response(requestPacket.getRequestId(), payload);
    }

    /**
     * Replies to a client request. Allowed in any run state (simplex or duplex).
     *
     * @throws IllegalStateException if the session is not in a run state
     */
    @Override
    public void response(int requestId, byte[] payload) {
        Assert.requireNonNull(payload, "payload must not be null.");
        if (!isEnableCommunication()) {
            throw new IllegalStateException("Response fail. Error: Illegal State. pinpointServer:" + toString());
        }

        ResponsePacket responsePacket = new ResponsePacket(requestId, payload);
        write0(responsePacket, responseWriteFailListener);
    }

    private ChannelFuture write0(Object message) {
        return write0(message, null);
    }

    // Single funnel for all outbound writes; optionally attaches a completion listener.
    private ChannelFuture write0(Object message, ChannelFutureListener futureListener) {
        ChannelFuture future = channel.write(message);
        if (futureListener != null) {
            future.addListener(futureListener);
        }
        return future;
    }

    public StreamChannelContext getStreamChannel(int channelId) {
        return streamChannelManager.findStreamChannel(channelId);
    }

    @Override
    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener) {
        return openStream(payload, messageListener, null);
    }

    /** Opens a server-initiated stream channel to the client. */
    @Override
    public ClientStreamChannelContext openStream(byte[] payload, ClientStreamChannelMessageListener messageListener, StreamChannelStateChangeEventHandler<ClientStreamChannel> stateChangeListener) {
        logger.info("{} createStream() started.", objectUniqName);

        ClientStreamChannelContext streamChannel = streamChannelManager.openStream(payload, messageListener, stateChangeListener);

        logger.info("{} createStream() completed.", objectUniqName);
        return streamChannel;
    }

    public void closeAllStreamChannel() {
        logger.info("{} closeAllStreamChannel() started.", objectUniqName);

        streamChannelManager.close();

        logger.info("{} closeAllStreamChannel() completed.", objectUniqName);
    }

    /** @return handshake properties, or an empty map before the first handshake. */
    @Override
    public Map<Object, Object> getChannelProperties() {
        Map<Object, Object> properties = this.properties.get();
        return properties == null ? Collections.emptyMap() : properties;
    }

    /**
     * Stores the handshake properties at most once.
     *
     * @return true only for the first non-null value (used to detect the first handshake)
     */
    public boolean setChannelProperties(Map<Object, Object> value) {
        if (value == null) {
            return false;
        }

        return this.properties.compareAndSet(null, Collections.unmodifiableMap(value));
    }

    @Override
    public SocketAddress getRemoteAddress() {
        return channel.getRemoteAddress();
    }

    /**
     * Starts a graceful server-initiated close: transitions to BEING_CLOSE_BY_SERVER
     * and writes a ServerClosePacket.
     *
     * @return the write future, or null if the state transition was rejected
     */
    public ChannelFuture sendClosePacket() {
        logger.info("{} sendClosePacket() started.", objectUniqName);

        SocketStateChangeResult stateChangeResult = state.toBeingClose();
        if (stateChangeResult.isChange()) {
            ChannelFuture writeFuture = write0(ServerClosePacket.DEFAULT_SERVER_CLOSE_PACKET, serverCloseWriteListener);

            logger.info("{} sendClosePacket() completed.", objectUniqName);
            return writeFuture;
        } else {
            logger.info("{} sendClosePacket() failed. Error:{}.", objectUniqName, stateChangeResult);
            return null;
        }
    }

    /** Entry point for all inbound packets; dispatches by packet type. */
    @Override
    public void messageReceived(Object message) {
        if (!isEnableCommunication()) {
            // FIXME need change rules.
            // as-is : do nothing when state is not run.
            // candidate : close channel when state is not run.
            logger.warn("{} messageReceived() failed. Error: Illegal state this message({}) will be ignore.", objectUniqName, message);
            return;
        }

        final short packetType = getPacketType(message);
        switch (packetType) {
            case PacketType.APPLICATION_SEND: {
                handleSend((SendPacket) message);
                return;
            }
            case PacketType.APPLICATION_REQUEST: {
                handleRequest((RequestPacket) message);
                return;
            }
            case PacketType.APPLICATION_RESPONSE: {
                handleResponse((ResponsePacket) message);
                return;
            }
            case PacketType.APPLICATION_STREAM_CREATE:
            case PacketType.APPLICATION_STREAM_CLOSE:
            case PacketType.APPLICATION_STREAM_CREATE_SUCCESS:
            case PacketType.APPLICATION_STREAM_CREATE_FAIL:
            case PacketType.APPLICATION_STREAM_RESPONSE:
            case PacketType.APPLICATION_STREAM_PING:
            case PacketType.APPLICATION_STREAM_PONG:
                // All stream traffic is delegated wholesale to the stream manager.
                handleStreamEvent((StreamPacket) message);
                return;
            case PacketType.CONTROL_HANDSHAKE:
                handleHandshake((ControlHandshakePacket) message);
                return;
            case PacketType.CONTROL_CLIENT_CLOSE: {
                handleClosePacket(channel);
                return;
            }
            case PacketType.CONTROL_PING_PAYLOAD: {
                handlePingPacket(channel, (PingPayloadPacket) message);
                return;
            }
            case PacketType.CONTROL_PING: {
                // Legacy ping (no payload variant).
                handlePingPacket(channel, (PingPacket) message);
                return;
            }
            default: {
                logger.warn("invalid messageReceived msg:{}, connection:{}", message, channel);
            }
        }
    }

    // Null-safe extraction of the packet-type discriminator.
    private short getPacketType(Object packet) {
        if (packet == null) {
            return PacketType.UNKNOWN;
        }

        if (packet instanceof Packet) {
            return ((Packet) packet).getPacketType();
        }

        return PacketType.UNKNOWN;
    }

    private void handleSend(SendPacket sendPacket) {
        messageListener.handleSend(sendPacket, this);
    }

    private void handleRequest(RequestPacket requestPacket) {
        messageListener.handleRequest(requestPacket, this);
    }

    private void handleResponse(ResponsePacket responsePacket) {
        // Completes the future registered in request().
        this.requestManager.messageReceived(responsePacket, this);
    }

    private void handleStreamEvent(StreamPacket streamPacket) {
        streamChannelManager.messageReceived(streamPacket);
    }

    /**
     * Handles the client handshake: decodes the payload, lets the listener decide the
     * response code, and on the FIRST handshake only, records the properties and moves
     * the session into RUN_DUPLEX or RUN_SIMPLEX. Repeat handshakes are answered with
     * an ALREADY_* code (see createHandshakeResponse) without changing state.
     */
    private void handleHandshake(ControlHandshakePacket handshakePacket) {
        int requestId = handshakePacket.getRequestId();
        Map<Object, Object> handshakeData = decodeHandshakePacket(handshakePacket);

        logger.info("{} handleHandshake() started. requestId:{}, data:{}", objectUniqName, requestId, handshakeData);

        HandshakeResponseCode responseCode = messageListener.handleHandshake(handshakeData);
        boolean isFirst = setChannelProperties(handshakeData);
        if (isFirst) {
            if (HandshakeResponseCode.DUPLEX_COMMUNICATION == responseCode) {
                this.remoteClusterOption = getClusterOption(handshakeData);
                state.toRunDuplex();
            } else if (HandshakeResponseCode.SIMPLEX_COMMUNICATION == responseCode || HandshakeResponseCode.SUCCESS == responseCode) {
                state.toRunSimplex();
            }
        }

        Map<String, Object> responseData = createHandshakeResponse(responseCode, isFirst);
        sendHandshakeResponse0(requestId, responseData);

        logger.info("{} handleHandshake() completed(isFirst:{}). requestId:{}, responseCode:{}", objectUniqName, isFirst, requestId, responseCode);
    }

    // Extracts the remote cluster option from the handshake payload; returns the
    // DISABLE option when absent or when the cluster id is empty.
    private ClusterOption getClusterOption(Map handshakeResponse) {
        if (handshakeResponse == Collections.EMPTY_MAP) {
            return ClusterOption.DISABLE_CLUSTER_OPTION;
        }

        Map cluster = (Map) handshakeResponse.get(ControlHandshakeResponsePacket.CLUSTER);
        if (cluster == null) {
            return ClusterOption.DISABLE_CLUSTER_OPTION;
        }

        String id = MapUtils.getString(cluster, "id", "");
        List<Role> roles = getRoles((List) cluster.get("roles"));

        if (StringUtils.isEmpty(id)) {
            return ClusterOption.DISABLE_CLUSTER_OPTION;
        } else {
            return new ClusterOption(true, id, roles);
        }
    }

    // Maps role-name strings to Role values, skipping non-string/blank entries.
    private List<Role> getRoles(List roleNames) {
        List<Role> roles = new ArrayList<Role>();
        for (Object roleName : roleNames) {
            if (roleName instanceof String && StringUtils.hasLength((String) roleName)) {
                roles.add(Role.getValue((String) roleName));
            }
        }
        return roles;
    }

    /** Client announced it is closing: transition to BEING_CLOSE_BY_CLIENT. */
    private void handleClosePacket(Channel channel) {
        logger.info("{} handleClosePacket() started.", objectUniqName);

        SocketStateChangeResult stateChangeResult = state.toBeingCloseByPeer();
        if (!stateChangeResult.isChange()) {
            logger.info("{} handleClosePacket() failed. Error: {}", objectUniqName, stateChangeResult);
        } else {
            logger.info("{} handleClosePacket() completed.", objectUniqName);
        }
    }

    /**
     * Legacy ping handler. The stateless PING_PACKET singleton is answered with a bare
     * pong; pings carrying state are adapted to PingPayloadPacket and checked below.
     */
    private void handlePingPacket(Channel channel, PingPacket packet) {
        logger.debug("{} handleLegacyPingPacket() started. packet:{}", objectUniqName, packet);

        if (healthCheckStateContext.getState() == HealthCheckState.WAIT) {
            healthCheckStateContext.toReceivedLegacy();
        }

        // packet without status value
        if (packet == PingPacket.PING_PACKET) {
            writePong(channel);
            return;
        }

        PingPayloadPacket pingPayloadPacket = new PingPayloadPacket(packet.getPingId(), packet.getStateVersion(), packet.getStateCode());
        handlePingPacket0(channel, pingPayloadPacket);
    }

    private void handlePingPacket(Channel channel, PingPayloadPacket packet) {
        logger.debug("{} handlePingPacket() started. packet:{}", objectUniqName, packet);

        if (healthCheckStateContext.getState() == HealthCheckState.WAIT) {
            healthCheckStateContext.toReceived();
        }

        handlePingPacket0(channel, packet);
    }

    /**
     * Compares the client's view of the session state (carried in the ping) with the
     * server's. A match resets the mismatch counter; repeated mismatches (window of 5)
     * move the session to ERROR_SYNC_STATE_SESSION and stop it; otherwise a pong is
     * still written so the health check keeps flowing.
     */
    private void handlePingPacket0(Channel channel, PingPayloadPacket packet) {
        SocketStateCode statusCode = state.getCurrentStateCode();

        if (statusCode.getId() == packet.getStateCode()) {
            stateChecker.unmark();

            messageListener.handlePing(packet, this);
            writePong(channel);
        } else {
            logger.warn("Session state sync failed. channel:{}, packet:{}, server-state:{}", channel, packet, statusCode);

            if (stateChecker.markAndCheckCondition()) {
                state.toErrorSyncStateSession();
                stop();
            } else {
                writePong(channel);
            }
        }
    }

    private void writePong(Channel channel) {
        write0(PongPacket.PONG_PACKET, pongWriteFutureListener);
    }

    /**
     * Builds the handshake response map. On a repeat handshake the code is downgraded
     * to the matching ALREADY_* value; the local cluster option is attached only when
     * clustering is enabled.
     */
    private Map<String, Object> createHandshakeResponse(HandshakeResponseCode responseCode, boolean isFirst) {
        HandshakeResponseCode createdCode = null;
        if (isFirst) {
            createdCode = responseCode;
        } else {
            if (HandshakeResponseCode.DUPLEX_COMMUNICATION == responseCode) {
                createdCode = HandshakeResponseCode.ALREADY_DUPLEX_COMMUNICATION;
            } else if (HandshakeResponseCode.SIMPLEX_COMMUNICATION == responseCode) {
                createdCode = HandshakeResponseCode.ALREADY_SIMPLEX_COMMUNICATION;
            } else {
                createdCode = responseCode;
            }
        }

        Map<String, Object> result = new HashMap<String, Object>();
        result.put(ControlHandshakeResponsePacket.CODE, createdCode.getCode());
        result.put(ControlHandshakeResponsePacket.SUB_CODE, createdCode.getSubCode());
        if (localClusterOption.isEnable()) {
            result.put(ControlHandshakeResponsePacket.CLUSTER, localClusterOption.getProperties());
        }

        return result;
    }

    // Encodes and writes the handshake response; encoding failures are logged, not thrown.
    private void sendHandshakeResponse0(int requestId, Map<String, Object> data) {
        try {
            byte[] resultPayload = ControlMessageEncodingUtils.encode(data);
            ControlHandshakeResponsePacket packet = new ControlHandshakeResponsePacket(requestId, resultPayload);

            write0(packet);
        } catch (ProtocolException e) {
            logger.warn(e.getMessage(), e);
        }
    }

    // Decodes the handshake payload; returns EMPTY_MAP on protocol errors
    // (the sentinel getClusterOption() checks against by identity).
    private Map<Object, Object> decodeHandshakePacket(ControlHandshakePacket message) {
        try {
            byte[] payload = message.getPayload();
            Map<Object, Object> properties = (Map) ControlMessageEncodingUtils.decode(payload);
            return properties;
        } catch (ProtocolException e) {
            logger.warn(e.getMessage(), e);
        }

        return Collections.EMPTY_MAP;
    }

    /** @return true while the session is in any run state (simplex or duplex). */
    public boolean isEnableCommunication() {
        return state.isEnableCommunication();
    }

    /** @return true only while the session is in duplex-run state. */
    public boolean isEnableDuplexCommunication() {
        return state.isEnableDuplexCommunication();
    }

    String getObjectUniqName() {
        return objectUniqName;
    }

    @Override
    public ClusterOption getLocalClusterOption() {
        return localClusterOption;
    }

    /** @return remote cluster option from the handshake; null before a duplex handshake. */
    @Override
    public ClusterOption getRemoteClusterOption() {
        return remoteClusterOption;
    }

    @Override
    public long getStartTimestamp() {
        return startTimestamp;
    }

    @Override
    public HealthCheckState getHealthCheckState() {
        return healthCheckStateContext.getState();
    }

    @Override
    public SocketStateCode getCurrentStateCode() {
        return state.getCurrentStateCode();
    }

    @Override
    public void close() {
        stop();
    }

    @Override
    public String toString() {
        StringBuilder log = new StringBuilder(32);
        log.append(objectUniqName);
        log.append("(");
        log.append("remote:");
        log.append(getRemoteAddress());
        log.append(", state:");
        log.append(getCurrentStateCode());
        log.append(", healthCheckState:");
        log.append(getHealthCheckState());
        log.append(")");

        return log.toString();
    }

}
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.iosched.util; import android.annotation.TargetApi; import android.app.Activity; import android.app.ActivityManager; import android.content.Context; import android.graphics.Bitmap; import android.graphics.Bitmap.CompressFormat; import android.graphics.BitmapFactory; import android.os.Build; import android.os.Bundle; import android.os.Environment; import android.os.StatFs; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentActivity; import android.support.v4.app.FragmentManager; import android.support.v4.util.LruCache; import at.devfest.app.BuildConfig; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import static com.google.android.apps.iosched.util.LogUtils.LOGD; import static com.google.android.apps.iosched.util.LogUtils.LOGE; import static com.google.android.apps.iosched.util.LogUtils.LOGV; import static com.google.android.apps.iosched.util.LogUtils.makeLogTag; /** * This class holds our bitmap caches (memory and disk). 
*/ public class ImageCache { private static final String TAG = makeLogTag(ImageCache.class); // Default memory cache size private static final int DEFAULT_MEM_CACHE_SIZE = 1024 * 1024 * 2; // 2MB private static final int DEFAULT_MEM_CACHE_DIVIDER = 8; // memory class/this = mem cache size // Default disk cache size private static final int DEFAULT_DISK_CACHE_SIZE = 1024 * 1024 * 10; // 10MB // Compression settings when writing images to disk cache private static final CompressFormat DEFAULT_COMPRESS_FORMAT = CompressFormat.JPEG; private static final int DEFAULT_COMPRESS_QUALITY = 75; private static final String CACHE_FILENAME_PREFIX = "cache_"; private static final int ICS_DISK_CACHE_INDEX = 0; // Constants to easily toggle various caches private static final boolean DEFAULT_MEM_CACHE_ENABLED = true; private static final boolean DEFAULT_DISK_CACHE_ENABLED = true; private static final boolean DEFAULT_CLEAR_DISK_CACHE_ON_START = false; private LruCache<String, Bitmap> mMemoryCache; private ICSDiskLruCache mICSDiskCache; private ImageCacheParams mCacheParams; private boolean mPauseDiskAccess = false; /** * Creating a new ImageCache object using the specified parameters. * * @param context The context to use * @param cacheParams The cache parameters to use to initialize the cache */ public ImageCache(Context context, ImageCacheParams cacheParams) { init(context, cacheParams); } /** * Creating a new ImageCache object using the default parameters. * * @param context The context to use * @param uniqueName A unique name that will be appended to the cache directory */ public ImageCache(Context context, String uniqueName) { init(context, new ImageCacheParams(context, uniqueName)); } /** * Find and return an existing ImageCache stored in a {@link RetainFragment}, if not found a new * one is created with defaults and saved to a {@link RetainFragment}. 
* * @param activity The calling {@link FragmentActivity} * @param uniqueName A unique name to append to the cache directory * @return An existing retained ImageCache object or a new one if one did not exist. */ public static ImageCache findOrCreateCache( final FragmentActivity activity, final String uniqueName) { return findOrCreateCache(activity, new ImageCacheParams(activity, uniqueName)); } /** * Find and return an existing ImageCache stored in a {@link RetainFragment}, if not found a new * one is created using the supplied params and saved to a {@link RetainFragment}. * * @param activity The calling {@link FragmentActivity} * @param cacheParams The cache parameters to use if creating the ImageCache * @return An existing retained ImageCache object or a new one if one did not exist */ public static ImageCache findOrCreateCache( final FragmentActivity activity, ImageCacheParams cacheParams) { // Search for, or create an instance of the non-UI RetainFragment final RetainFragment mRetainFragment = findOrCreateRetainFragment( activity.getSupportFragmentManager()); // See if we already have an ImageCache stored in RetainFragment ImageCache imageCache = (ImageCache) mRetainFragment.getObject(); // No existing ImageCache, create one and store it in RetainFragment if (imageCache == null) { imageCache = new ImageCache(activity, cacheParams); mRetainFragment.setObject(imageCache); } return imageCache; } /** * Initialize the cache, providing all parameters. 
* * @param context The context to use * @param cacheParams The cache parameters to initialize the cache */ private void init(Context context, ImageCacheParams cacheParams) { mCacheParams = cacheParams; final File diskCacheDir = getDiskCacheDir(context, cacheParams.uniqueName); if (cacheParams.diskCacheEnabled) { if (!diskCacheDir.exists()) { diskCacheDir.mkdir(); } if (getUsableSpace(diskCacheDir) > cacheParams.diskCacheSize) { try { mICSDiskCache = ICSDiskLruCache.open( diskCacheDir, 1, 1, cacheParams.diskCacheSize); } catch (final IOException e) { LOGE(TAG, "init - " + e); } } } // Set up memory cache if (cacheParams.memoryCacheEnabled) { mMemoryCache = new LruCache<String, Bitmap>(cacheParams.memCacheSize) { /** * Measure item size in bytes rather than units which is more practical for a bitmap * cache */ @Override protected int sizeOf(String key, Bitmap bitmap) { return getBitmapSize(bitmap); } }; } } public synchronized void addBitmapToCache(String data, Bitmap bitmap) { if (data == null || bitmap == null) { return; } // Add to memory cache if (mMemoryCache != null && mMemoryCache.get(data) == null) { mMemoryCache.put(data, bitmap); } // Add to disk cache if (mICSDiskCache != null) { final String key = hashKeyForDisk(data); try { if (mICSDiskCache.get(key) == null) { final ICSDiskLruCache.Editor editor = mICSDiskCache.edit(key); if (editor != null) { final OutputStream out = editor.newOutputStream(ICS_DISK_CACHE_INDEX); bitmap.compress( mCacheParams.compressFormat, mCacheParams.compressQuality, out); editor.commit(); } } } catch (final IOException e) { LOGE(TAG, "addBitmapToCache - " + e); } } } /** * Get from memory cache. 
* * @param data Unique identifier for which item to get * @return The bitmap if found in cache, null otherwise */ public Bitmap getBitmapFromMemCache(String data) { if (mMemoryCache != null) { final Bitmap memBitmap = mMemoryCache.get(data); if (memBitmap != null) { if (BuildConfig.DEBUG) { LOGD(TAG, "Memory cache hit"); } return memBitmap; } } return null; } /** * Get from disk cache. * * @param data Unique identifier for which item to get * @return The bitmap if found in cache, null otherwise */ public Bitmap getBitmapFromDiskCache(String data) { final String key = hashKeyForDisk(data); if (mICSDiskCache != null) { try { final ICSDiskLruCache.Snapshot snapshot = mICSDiskCache.get(key); if (snapshot != null) { LOGV(TAG, "ICS disk cache hit"); while (mPauseDiskAccess) {} return BitmapFactory.decodeStream( snapshot.getInputStream(ICS_DISK_CACHE_INDEX)); } } catch (final IOException e) { LOGE(TAG, "getBitmapFromDiskCache - " + e); } } return null; } public void close() { if (mICSDiskCache != null) { try { if (!mICSDiskCache.isClosed()) { // Should really close() here but need to synchronize up other methods that // access mICSDiskCache first. mICSDiskCache.flush(); } } catch (IOException ignored) { } } } public void clearCaches() { try { if (mICSDiskCache != null) { mICSDiskCache.delete(); } } catch (IOException e) { LOGE(TAG, "clearCaches() - " + e); } mMemoryCache.evictAll(); } public void setPauseDiskCache(boolean pause) { mPauseDiskAccess = pause; } /** * A holder class that contains cache parameters. 
*/ public static class ImageCacheParams { public String uniqueName; public int memCacheSize = DEFAULT_MEM_CACHE_SIZE; public long diskCacheSize = DEFAULT_DISK_CACHE_SIZE; public CompressFormat compressFormat = DEFAULT_COMPRESS_FORMAT; public int compressQuality = DEFAULT_COMPRESS_QUALITY; public boolean memoryCacheEnabled = DEFAULT_MEM_CACHE_ENABLED; public boolean diskCacheEnabled = DEFAULT_DISK_CACHE_ENABLED; public boolean clearDiskCacheOnStart = DEFAULT_CLEAR_DISK_CACHE_ON_START; public String cacheFilenamePrefix = CACHE_FILENAME_PREFIX; public int memoryClass = 0; public ImageCacheParams(String uniqueName) { this.uniqueName = uniqueName; } public ImageCacheParams(Context context, String uniqueName) { this.uniqueName = uniqueName; final ActivityManager activityManager = (ActivityManager) context.getSystemService(Activity.ACTIVITY_SERVICE); memoryClass = activityManager.getMemoryClass(); memCacheSize = memoryClass / DEFAULT_MEM_CACHE_DIVIDER * 1024 * 1024; } } /** * Get the size in bytes of a bitmap. */ @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR1) public static int getBitmapSize(Bitmap bitmap) { if (UIUtils.hasHoneycombMR1()) { return bitmap.getByteCount(); } // Pre HC-MR1 return bitmap.getRowBytes() * bitmap.getHeight(); } /** * Get a usable cache directory (external if available, internal otherwise). * * @param context The context to use * @param uniqueName A unique directory name to append to the cache dir * @return The cache dir */ public static File getDiskCacheDir(Context context, String uniqueName) { // Check if media is mounted or storage is built-in, if so, try and use external cache dir // otherwise use internal cache dir final String cachePath = context.getCacheDir().getPath(); return new File(cachePath + File.separator + uniqueName); } /** * Check how much usable space is available at a given path. 
* * @param path The path to check * @return The space available in bytes */ @TargetApi(Build.VERSION_CODES.GINGERBREAD) public static long getUsableSpace(File path) { if (UIUtils.hasGingerbread()) { return path.getUsableSpace(); } final StatFs stats = new StatFs(path.getPath()); return (long) stats.getBlockSize() * (long) stats.getAvailableBlocks(); } /** * A hashing method that changes a string (like a URL) into a hash suitable for using as a * disk filename. */ public static String hashKeyForDisk(String key) { String cacheKey; try { final MessageDigest mDigest = MessageDigest.getInstance("SHA-1"); mDigest.update(key.getBytes()); cacheKey = bytesToHexString(mDigest.digest()); } catch (NoSuchAlgorithmException e) { cacheKey = String.valueOf(key.hashCode()); } return cacheKey; } private static String bytesToHexString(byte[] bytes) { // http://stackoverflow.com/questions/332079 StringBuilder sb = new StringBuilder(); for (int i = 0; i < bytes.length; i++) { String hex = Integer.toHexString(0xFF & bytes[i]); if (hex.length() == 1) { sb.append('0'); } sb.append(hex); } return sb.toString(); } /** * Locate an existing instance of this Fragment or if not found, create and * add it using FragmentManager. * * @param fm The FragmentManager manager to use. * @return The existing instance of the Fragment or the new instance if just * created. */ public static RetainFragment findOrCreateRetainFragment(FragmentManager fm) { // Check to see if we have retained the worker fragment. RetainFragment mRetainFragment = (RetainFragment) fm.findFragmentByTag(TAG); // If not retained (or first time running), we need to create and add // it. if (mRetainFragment == null) { mRetainFragment = new RetainFragment(); fm.beginTransaction().add(mRetainFragment, TAG).commit(); } return mRetainFragment; } /** * A simple non-UI Fragment that stores a single Object and is retained over * configuration changes. In this sample it will be used to retain the * ImageCache object. 
*/ public static class RetainFragment extends Fragment { private Object mObject; /** * Empty constructor as per the Fragment documentation */ public RetainFragment() { } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Make sure this Fragment is retained over a configuration change setRetainInstance(true); } /** * Store a single object in this Fragment. * * @param object The object to store */ public void setObject(Object object) { mObject = object; } /** * Get the stored object. * * @return The stored object */ public Object getObject() { return mObject; } } }
/*
 * Copyright 2002-2007 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.springframework.richclient.application.statusbar.support;

import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;

import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.UIManager;
import javax.swing.border.BevelBorder;
import javax.swing.border.Border;

import org.springframework.richclient.application.ApplicationServicesLocator;
import org.springframework.richclient.factory.AbstractControlFactory;
import org.springframework.richclient.image.IconSource;
import org.springframework.richclient.progress.ProgressMonitor;
import org.springframework.util.StringUtils;

/**
 * <code>ProgressMonitor</code> implementation that handles its own controls:
 * <ul>
 * <li>a <code>JProgressBar</code> to show the progress to the user</li>
 * <li>optionally a <code>JButton</code> to allow the user to cancel the
 * current task</li>
 * </ul>
 * <p>
 * Initally the progress bar and button are hidden, and shown when a task is
 * running longer than the <code>delayProgress</code> property (default is 500
 * ms).
 *
 * @author Peter De Bruycker
 */
public class StatusBarProgressMonitor extends AbstractControlFactory implements ProgressMonitor {

    /** Progress bar creation is delayed by this ms */
    public static final int DEFAULT_DELAY_PROGRESS = 500;

    /** Sentinel passed to taskStarted() for tasks of unknown length (indeterminate bar). */
    public static final int UNKNOWN = -1;

    private JButton cancelButton;

    private boolean cancelEnabled = true;

    private Icon cancelIcon;

    // Panel holding the progress bar and cancel button; built lazily in createControl().
    private JPanel control;

    private boolean isCanceled;

    private JProgressBar progressBar;

    // Timestamp of taskStarted(); used by worked() to honor the delayProgress threshold.
    private long startTime;

    private String taskName;

    private int delayProgress = DEFAULT_DELAY_PROGRESS;

    /** Builds the borderless, icon-only cancel button. */
    protected JButton createCancelButton() {
        JButton cancelButton = new JButton();
        cancelButton.setBorderPainted(false);
        cancelButton.setIcon(getCancelIcon());
        cancelButton.setMargin(new Insets(0, 0, 0, 0));

        return cancelButton;
    }

    /**
     * Assembles the composite control: progress bar (center) + cancel button (line end),
     * wrapped in a lowered-bevel border. Both children start hidden via hideProgress().
     */
    protected JComponent createControl() {
        control = new JPanel(new BorderLayout());

        cancelButton = createCancelButton();
        cancelButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                logger.info("Requesting task cancellation...");
                setCanceled(true);
            }
        });

        progressBar = createProgressBar();
        control.add(progressBar);
        control.add(cancelButton, BorderLayout.LINE_END);

        Border bevelBorder = BorderFactory.createBevelBorder(BevelBorder.LOWERED, UIManager
                .getColor("controlHighlight"), UIManager.getColor("controlShadow"));
        Border emptyBorder = BorderFactory.createEmptyBorder(1, 3, 1, 3);
        control.setBorder(BorderFactory.createCompoundBorder(bevelBorder, emptyBorder));

        // initially hide the control
        hideProgress();

        return control;
    }

    /** Creates the 200x17 string-painted progress bar. */
    protected JProgressBar createProgressBar() {
        JProgressBar progressBar = new JProgressBar();
        progressBar.setPreferredSize(new Dimension(200, 17));
        progressBar.setStringPainted(true);

        return progressBar;
    }

    /** Called when the task finishes: fill the bar, clear its text and hide everything. */
    public void done() {
        startTime = 0;
        if (progressBar != null) {
            progressBar.setValue(progressBar.getMaximum());
            progressBar.setString("");
        }
        hideProgress();
    }

    /** Lazily resolves the cancel icon ("cancel.icon") from the application's IconSource. */
    public Icon getCancelIcon() {
        if (cancelIcon == null) {
            cancelIcon = ((IconSource) ApplicationServicesLocator.services().getService(IconSource.class)).getIcon("cancel.icon");
        }

        return cancelIcon;
    }

    protected JProgressBar getProgressBar() {
        return progressBar;
    }

    private void hideButton() {
        cancelButton.setEnabled(cancelEnabled);
        cancelButton.setVisible(false);
    }

    /** Hides both the bar and the cancel button (only if the bar is currently showing). */
    protected void hideProgress() {
        if (progressBar.isVisible()) {
            progressBar.setVisible(false);
            cancelButton.setVisible(false);
        }
    }

    public boolean isCanceled() {
        return isCanceled;
    }

    /** Sets the cancel flag; the button is disabled once cancellation is requested. */
    public void setCanceled(boolean b) {
        isCanceled = b;
        cancelButton.setEnabled(!b);
    }

    /** Enables/disables cancellation; shows or hides the button to match. */
    public void setCancelEnabled(boolean enabled) {
        cancelEnabled = enabled;
        if (progressBar.isVisible() && !cancelButton.isVisible() && enabled) {
            showButton();
        }
        else {
            hideButton();
        }
    }

    public void setCancelIcon(Icon icon) {
        cancelIcon = icon;
        if (cancelButton != null) {
            cancelButton.setIcon(icon);
        }
    }

    private void showButton() {
        cancelButton.setEnabled(cancelEnabled);
        cancelButton.setVisible(true);
    }

    /** Shows the bar (and, when cancellation is enabled, the cancel button). */
    private void showProgress() {
        if (!progressBar.isVisible()) {
            if (cancelEnabled) {
                showButton();
            }
            progressBar.setVisible(true);
        }
    }

    /**
     * Displays "taskName - subTask" (or just the sub-task name) on the bar.
     * NOTE(review): the empty-name branch assigns text = name in both arms, so an
     * empty sub-task name clears the bar text entirely rather than restoring the
     * task name — confirm whether that is intended.
     */
    public void subTaskStarted(String name) {
        String text;
        if (name.length() == 0) {
            text = name;
        }
        else {
            if (StringUtils.hasText(taskName)) {
                text = taskName + " - " + name;
            }
            else {
                text = name;
            }
        }
        progressBar.setString(text);
    }

    /**
     * Starts a new task: records the start time, resets the cancel flag and configures
     * the bar (indeterminate when totalWork == UNKNOWN, else range [0, totalWork]).
     */
    public void taskStarted(String name, int totalWork) {
        startTime = System.currentTimeMillis();
        isCanceled = false;
        if (totalWork == UNKNOWN) {
            progressBar.setIndeterminate(true);
        }
        else {
            progressBar.setIndeterminate(false);
            progressBar.setMaximum(totalWork);
            progressBar.setValue(0);
        }
        taskName = name;
        progressBar.setString(taskName);
        showProgress();
    }

    /**
     * Reports progress. Once the task has run longer than delayProgress the control is
     * made visible.
     * NOTE(review): this calls control.setVisible(true) rather than showProgress(), so
     * the bar itself (hidden by hideProgress()) may stay invisible — verify against the
     * intended show behavior.
     * NOTE(review): the "%" string assumes totalWork was 100; for other maxima the
     * displayed percentage is wrong — confirm callers always use 100.
     */
    public void worked(int work) {
        if (!progressBar.isVisible()) {
            if ((System.currentTimeMillis() - startTime) > delayProgress) {
                control.setVisible(true);
            }
        }
        progressBar.setValue((int) work);
        if (progressBar.isStringPainted()) {
            progressBar.setString(((int) work) + "%");
        }
    }

    /** Sets the delay (ms) before the progress control becomes visible. */
    public void setDelayProgress(int delayProgress) {
        this.delayProgress = delayProgress;
    }
}
package org.moparscape.elysium.entity.component; import org.moparscape.elysium.entity.MobileEntity; import org.moparscape.elysium.entity.Path; import org.moparscape.elysium.world.Point; import org.moparscape.elysium.world.Region; import org.moparscape.elysium.world.TileValue; import org.moparscape.elysium.world.World; /** * Created by IntelliJ IDEA. * * @author lothy */ public final class Movement { private int curWaypoint; private boolean hasMoved = false; private Point location; private MobileEntity owner; private Path path; private Region region; public Movement(MobileEntity owner) { this.owner = owner; } protected boolean atStart() { Point loc = owner.getLocation(); return loc.getX() == path.getStartX() && loc.getY() == path.getStartY(); } /** * Checks if we are at the given waypoint */ protected boolean atWaypoint(int waypoint) { Point loc = owner.getLocation(); return path.getWaypointX(waypoint) == loc.getX() && path.getWaypointY(waypoint) == loc.getY(); } private int[] cancelCoords() { resetPath(); return new int[]{-1, -1}; } /** * Checks if we have reached the end of our path */ public boolean finishedPath() { if (path == null) { return true; } if (path.getPathLength() > 0) { return atWaypoint(path.getPathLength() - 1); } else { return atStart(); } } public Point getLocation() { return location; } public void setLocation(Point location) { this.setLocation(location, false); } /** * Gets the next coordinate in the right direction */ protected int[] getNextCoords(int startX, int destX, int startY, int destY) { int[] coords = {startX, startY}; boolean myXBlocked = false, myYBlocked = false, newXBlocked = false, newYBlocked = false; if (startX > destX) { myXBlocked = isBlocking(startX - 1, startY, 8); // Check right tiles left wall coords[0] = startX - 1; } else if (startX < destX) { myXBlocked = isBlocking(startX + 1, startY, 2); // Check left tiles right wall coords[0] = startX + 1; } if (startY > destY) { myYBlocked = isBlocking(startX, startY - 1, 4); // 
Check top tiles bottom wall coords[1] = startY - 1; } else if (startY < destY) { myYBlocked = isBlocking(startX, startY + 1, 1); // Check bottom tiles top wall coords[1] = startY + 1; } // If both directions are blocked OR we are going straight and the direction is blocked if ((myXBlocked && myYBlocked) || (myXBlocked && startY == destY) || (myYBlocked && startX == destX)) { return cancelCoords(); } if (coords[0] > startX) { newXBlocked = isBlocking(coords[0], coords[1], 2); // Check dest tiles right wall } else if (coords[0] < startX) { newXBlocked = isBlocking(coords[0], coords[1], 8); // Check dest tiles left wall } if (coords[1] > startY) { newYBlocked = isBlocking(coords[0], coords[1], 1); // Check dest tiles top wall } else if (coords[1] < startY) { newYBlocked = isBlocking(coords[0], coords[1], 4); // Check dest tiles bottom wall } // If both directions are blocked OR we are going straight and the direction is blocked if ((newXBlocked && newYBlocked) || (newXBlocked && startY == coords[1]) || (myYBlocked && startX == coords[0])) { return cancelCoords(); } // If only one direction is blocked, but it blocks both tiles if ((myXBlocked && newXBlocked) || (myYBlocked && newYBlocked)) { return cancelCoords(); } return coords; } public boolean hasMoved() { return hasMoved; } private boolean isBlocking(int x, int y, int bit) { World world = World.getInstance(); TileValue t = world.getTileValue(x, y); return isBlocking(t.mapValue, (byte) bit) || isBlocking(t.objectValue, (byte) bit); } private boolean isBlocking(byte val, byte bit) { // 0x70, or 112, is 16 OR 32 OR 64. 
If ANDing this mask with the val != 0 then the tile is blocked int mask = 0x70 | bit; return (val & mask) != 0; } public void resetMoved() { this.hasMoved = false; } /** * Resets the path (stops movement) */ public void resetPath() { path = null; curWaypoint = -1; } public void setLocation(Point location, boolean teleport) { if (!teleport) { hasMoved = true; owner.updateSprite(location); } Region newRegion = Region.getRegion(location); Region oldRegion = region; owner.updateRegion(oldRegion, newRegion); this.location = location; this.region = newRegion; } /** * Updates our position to the next in the path */ protected void setNextPosition() { Point loc = owner.getLocation(); int[] newCoords = {-1, -1}; if (curWaypoint == -1) { if (atStart()) { curWaypoint = 0; } else { newCoords = getNextCoords(loc.getX(), path.getStartX(), loc.getY(), path.getStartY()); } } if (curWaypoint > -1) { if (atWaypoint(curWaypoint)) { curWaypoint++; } if (curWaypoint < path.getPathLength()) { newCoords = getNextCoords(loc.getX(), path.getWaypointX(curWaypoint), loc.getY(), path.getWaypointY(curWaypoint)); } else { resetPath(); } } if (newCoords[0] > -1 && newCoords[1] > -1) { setLocation(new Point(newCoords[0], newCoords[1])); } } public void setPath(Path path) { this.curWaypoint = -1; this.path = path; } /** * Updates the point in the path to the next one * assuming we are not finished */ public void updatePosition() { if (!finishedPath()) { setNextPosition(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.generator.swagger;

import java.io.IOException;
import java.time.Instant;
import java.util.function.Function;
import java.util.stream.Collector;

import javax.annotation.Generated;
import javax.lang.model.element.Modifier;

import com.squareup.javapoet.AnnotationSpec;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.JavaFile;
import com.squareup.javapoet.MethodSpec;
import com.squareup.javapoet.TypeSpec;

import io.swagger.models.Info;
import io.swagger.models.Swagger;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.util.ObjectHelper;

import static org.apache.camel.util.StringHelper.notEmpty;

/**
 * Generates Java source code: a {@link RouteBuilder} subclass whose
 * {@code configure()} method declares REST DSL routes derived from a
 * {@link Swagger} specification. Class and package names are derived from the
 * specification's info title and host unless overridden via
 * {@link #withClassName(String)} / {@link #withPackageName(String)}.
 */
public abstract class RestDslSourceCodeGenerator<T> extends RestDslGenerator<RestDslSourceCodeGenerator<T>> {

    static final String DEFAULT_CLASS_NAME = "RestDslRoute";

    static final String DEFAULT_PACKAGE_NAME = "rest.dsl.generated";

    private static final String DEFAULT_INDENT = "    ";

    private Function<Swagger, String> classNameGenerator = RestDslSourceCodeGenerator::generateClassName;

    // Timestamp placed in the @Generated annotation when timestamps are enabled.
    private Instant generated = Instant.now();

    private String indent = DEFAULT_INDENT;

    private Function<Swagger, String> packageNameGenerator = RestDslSourceCodeGenerator::generatePackageName;

    private boolean sourceCodeTimestamps;

    RestDslSourceCodeGenerator(final Swagger swagger) {
        super(swagger);
    }

    /** Writes the generated source code to the given destination. */
    public abstract void generate(T destination) throws IOException;

    /** Uses the given class name instead of one derived from the Swagger info title. */
    public RestDslSourceCodeGenerator<T> withClassName(final String className) {
        notEmpty(className, "className");
        this.classNameGenerator = s -> className;

        return this;
    }

    /** Uses the given indent string (must not be null) for the emitted source. */
    public RestDslSourceCodeGenerator<T> withIndent(final String indent) {
        this.indent = ObjectHelper.notNull(indent, "indent");

        return this;
    }

    /** Omits the generation timestamp from the {@code @Generated} annotation (the default). */
    public RestDslSourceCodeGenerator<T> withoutSourceCodeTimestamps() {
        sourceCodeTimestamps = false;

        return this;
    }

    /** Uses the given package name instead of one derived from the Swagger host. */
    public RestDslSourceCodeGenerator<T> withPackageName(final String packageName) {
        notEmpty(packageName, "packageName");
        this.packageNameGenerator = s -> packageName;

        return this;
    }

    /** Includes the generation timestamp in the {@code @Generated} annotation. */
    public RestDslSourceCodeGenerator<T> withSourceCodeTimestamps() {
        sourceCodeTimestamps = true;

        return this;
    }

    /**
     * Builds the {@code configure()} method: optional {@code restConfiguration()}
     * setup followed by one REST DSL statement per Swagger path.
     */
    MethodSpec generateConfigureMethod(final Swagger swagger) {
        final MethodSpec.Builder configure = MethodSpec.methodBuilder("configure").addModifiers(Modifier.PUBLIC)
            .returns(void.class).addJavadoc("Defines Apache Camel routes using REST DSL fluent API.\n");

        final MethodBodySourceCodeEmitter emitter = new MethodBodySourceCodeEmitter(configure);

        if (restComponent != null) {
            configure.addCode("\n");
            configure.addCode("restConfiguration().component(\"" + restComponent + "\")");
            if (restContextPath != null) {
                configure.addCode(".contextPath(\"" + restContextPath + "\")");
            }
            if (ObjectHelper.isNotEmpty(apiContextPath)) {
                configure.addCode(".apiContextPath(\"" + apiContextPath + "\")");
            }
            configure.addCode(";\n\n");
        }

        final PathVisitor<MethodSpec> restDslStatement = new PathVisitor<>(swagger.getBasePath(), emitter, filter,
            destinationGenerator());

        swagger.getPaths().forEach(restDslStatement::visit);

        return emitter.result();
    }

    Instant generated() {
        return generated;
    }

    /**
     * Assembles the complete generated type: a public final RouteBuilder with
     * the configure() method, a {@code @Generated} annotation, and an optional
     * Spring {@code @Component} annotation.
     */
    JavaFile generateSourceCode() {
        final MethodSpec methodSpec = generateConfigureMethod(swagger);

        final String classNameToUse = classNameGenerator.apply(swagger);

        final AnnotationSpec.Builder generatedAnnotation = AnnotationSpec.builder(Generated.class).addMember("value",
            "$S", getClass().getName());
        if (sourceCodeTimestamps) {
            generatedAnnotation.addMember("date", "$S", generated());
        }

        TypeSpec.Builder builder = TypeSpec.classBuilder(classNameToUse).superclass(RouteBuilder.class)
            .addModifiers(Modifier.PUBLIC, Modifier.FINAL).addMethod(methodSpec)
            .addAnnotation(generatedAnnotation.build())
            .addJavadoc("Generated from Swagger specification by Camel REST DSL generator.\n");

        if (springComponent) {
            final AnnotationSpec.Builder springAnnotation = AnnotationSpec
                .builder(ClassName.bestGuess("org.springframework.stereotype.Component"));
            builder.addAnnotation(springAnnotation.build());
        }

        TypeSpec generatedRouteBuilder = builder.build();

        final String packageNameToUse = packageNameGenerator.apply(swagger);

        return JavaFile.builder(packageNameToUse, generatedRouteBuilder).indent(indent).build();
    }

    RestDslSourceCodeGenerator<T> withGeneratedTime(final Instant generated) {
        this.generated = generated;

        return this;
    }

    /**
     * Derives a Java class name from the Swagger info title, keeping only
     * ASCII identifier characters. Falls back to {@link #DEFAULT_CLASS_NAME}
     * when no usable title exists or the result is not a valid identifier.
     */
    static String generateClassName(final Swagger swagger) {
        final Info info = swagger.getInfo();
        if (info == null) {
            return DEFAULT_CLASS_NAME;
        }

        final String title = info.getTitle();
        if (title == null) {
            return DEFAULT_CLASS_NAME;
        }

        // FIX: the upper bound was previously `c < 'z'`, which silently dropped
        // the letter 'z' itself from generated class names; the inclusive bound
        // keeps all ASCII identifier characters while still rejecting non-ASCII.
        final String className = title.chars().filter(Character::isJavaIdentifierPart).filter(c -> c <= 'z').boxed()
            .collect(Collector.of(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append,
                StringBuilder::toString));

        if (className.isEmpty() || !Character.isJavaIdentifierStart(className.charAt(0))) {
            return DEFAULT_CLASS_NAME;
        }

        return className;
    }

    /**
     * Derives a package name by reversing the components of the Swagger host
     * (minus any port). "localhost" or an absent host yields
     * {@link #DEFAULT_PACKAGE_NAME}.
     */
    static String generatePackageName(final Swagger swagger) {
        final String host = swagger.getHost();

        if (ObjectHelper.isNotEmpty(host)) {
            final String hostWithoutPort = host.replaceFirst(":.*", "");

            if ("localhost".equalsIgnoreCase(hostWithoutPort)) {
                return DEFAULT_PACKAGE_NAME;
            }

            final StringBuilder packageName = new StringBuilder();

            final String[] parts = hostWithoutPort.split("\\.");

            for (int i = parts.length - 1; i >= 0; i--) {
                packageName.append(parts[i]);
                if (i != 0) {
                    packageName.append('.');
                }
            }

            return packageName.toString();
        }

        return DEFAULT_PACKAGE_NAME;
    }
}
/*
 * Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
 */
package com.lightbend.lagom.javadsl.pubsub;

import com.lightbend.lagom.internal.javadsl.pubsub.PubSubRegistryImpl;
import com.lightbend.lagom.javadsl.pubsub.PubSubModule;
import com.lightbend.lagom.javadsl.cluster.testkit.ActorSystemModule;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

import java.util.Arrays;
import java.util.concurrent.TimeUnit;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import scala.concurrent.duration.Duration;

import akka.actor.ActorSystem;
import akka.cluster.Cluster;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
import akka.stream.javadsl.Sink;
import akka.stream.javadsl.Source;
import akka.stream.testkit.TestSubscriber;
import akka.stream.testkit.javadsl.TestSink;
import akka.testkit.JavaTestKit;

/**
 * Integration tests for the Lagom pub-sub API ({@code PubSubRegistry} /
 * {@code PubSubRef}) against a single-node Akka cluster. Each test uses its
 * own topic id so tests cannot see each other's messages.
 */
public class PubSubTest {

  static ActorSystem system;

  @BeforeClass
  public static void setup() {
    // Cluster provider with a random port; distributed pub-sub requires
    // cluster membership, so the node joins itself.
    Config config = ConfigFactory.parseString(
        "akka.actor.provider = akka.cluster.ClusterActorRefProvider \n"
            + "akka.remote.netty.tcp.port = 0 \n"
            + "akka.remote.netty.tcp.hostname = 127.0.0.1 \n"
            + "akka.loglevel = INFO \n");

    system = ActorSystem.create("PubSubTest", config);
    Cluster.get(system).join(Cluster.get(system).selfAddress());
  }

  // yeah, the Akka testkit is in need of some Java 8 love
  // Polls (up to 10s) until hasAnySubscribers() reports the expected value;
  // needed because subscriber registration is asynchronous.
  private void awaitHasSubscribers(PubSubRef<?> ref, boolean expected) {
    new JavaTestKit(system) {
      {
        new AwaitCond(Duration.create(10, TimeUnit.SECONDS)) {
          @Override
          protected boolean cond() {
            try {
              return expected == ref.hasAnySubscribers().toCompletableFuture().get();
            } catch (Exception e) {
              // Treat lookup failures as "not yet"; AwaitCond will retry.
              return false;
            }
          }
        };
      }
    };
  }

  @AfterClass
  public static void teardown() {
    JavaTestKit.shutdownActorSystem(system);
    system = null;
  }

  private final Injector injector = Guice.createInjector(new ActorSystemModule(system), new PubSubModule());

  private PubSubRegistry registry() {
    return injector.getInstance(PubSubRegistry.class);
  }

  /** Messages published one-by-one are received in order by a subscriber. */
  @Test
  public void testSimplePubSub() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<Notification, ?> sub = ref.subscriber();
    final TestSubscriber.Probe<String> probe = sub
        .map(notification -> notification.getMsg())
        .runWith(TestSink.probe(system), mat)
        .request(2);
    awaitHasSubscribers(ref, true);

    ref.publish(new Notification("hello"));
    ref.publish(new Notification("hi again"));
    probe.expectNext("hello");
    probe.expectNext("hi again");
  }

  /** A Source run into the publisher Sink delivers all elements, honouring demand. */
  @Test
  public void testStreamingPublish() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "1");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<Notification, ?> sub = ref.subscriber();
    final TestSubscriber.Probe<String> probe = sub.map(notification -> notification.getMsg())
        .runWith(TestSink.probe(system), mat).request(2);
    awaitHasSubscribers(ref, true);

    final Sink<Notification, ?> pub = ref.publisher();
    Source.from(
        Arrays.asList(new Notification("hello-1"), new Notification("hello-2"), new Notification("hello-3")))
        .runWith(pub, mat);

    probe.expectNext("hello-1");
    probe.expectNext("hello-2");
    // Only 2 elements were requested; the third arrives after more demand.
    probe.expectNoMsg(Duration.create(100, TimeUnit.MILLISECONDS));
    probe.request(10);
    probe.expectNext("hello-3");
  }

  /** The same subscriber Source can be materialized twice; both copies receive each message. */
  @Test
  public void testSubscribeMaterializeTwo() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "2");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<String, ?> src = ref.subscriber()
        .map(notification -> notification.getMsg());
    final TestSubscriber.Probe<String> probe1 = src
        .runWith(TestSink.probe(system), mat)
        .request(2);
    final TestSubscriber.Probe<String> probe2 = src
        .runWith(TestSink.probe(system), mat)
        .request(2);
    awaitHasSubscribers(ref, true);

    ref.publish(new Notification("hello"));
    probe1.expectNext("hello");
    probe2.expectNext("hello");
  }

  /** A subscriber Source can be re-materialized after the first materialization is cancelled. */
  @Test
  public void testSubscribeMaterializeMoreThanOnce() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "3");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<String, ?> src = ref.subscriber()
        .map(notification -> notification.getMsg());
    final TestSubscriber.Probe<String> probe1 = src.runWith(TestSink.probe(system), mat).request(2);
    awaitHasSubscribers(ref, true);

    ref.publish(new Notification("hello"));
    probe1.expectNext("hello");

    probe1.cancel();

    final TestSubscriber.Probe<String> probe2 = src.runWith(TestSink.probe(system), mat).request(2);
    ref.publish(new Notification("hello2"));
    probe2.expectNext("hello2");
  }

  /** Two concurrent materializations of the publisher Sink interleave into one subscriber. */
  @Test
  public void testPublishMaterializeTwo() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "4");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<Notification, ?> sub = ref.subscriber();
    TestSubscriber.Probe<String> subProbe = sub.map(notification -> notification.getMsg())
        .runWith(TestSink.probe(system), mat).request(10);
    awaitHasSubscribers(ref, true);

    final Sink<Notification, ?> pub = ref.publisher();
    Source
        .from(Arrays.asList(new Notification("hello-1a"), new Notification("hello-2a"),
            new Notification("hello-3a")))
        .runWith(pub, mat);
    Source
        .from(Arrays.asList(new Notification("hello-1b"), new Notification("hello-2b"),
            new Notification("hello-3b")))
        .runWith(pub, mat);

    // Interleaving order between the two streams is unspecified.
    subProbe.expectNextUnordered("hello-1a", "hello-1b", "hello-2a", "hello-2b", "hello-3a", "hello-3b");
  }

  /** The publisher Sink can be re-materialized for a second batch after the first completes. */
  @Test
  public void testPublishMaterializeMoreThanOnce() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "5");
    final PubSubRef<Notification> ref = registry().refFor(topic);

    final Source<Notification, ?> sub = ref.subscriber();
    final TestSubscriber.Probe<String> probe1 = sub.map(notification -> notification.getMsg())
        .runWith(TestSink.probe(system), mat).request(10);
    awaitHasSubscribers(ref, true);

    final Sink<Notification, ?> pub = ref.publisher();
    Source
        .from(Arrays.asList(new Notification("hello-1a"), new Notification("hello-2a"),
            new Notification("hello-3a")))
        .runWith(pub, mat);

    probe1.expectNext("hello-1a");
    probe1.expectNext("hello-2a");
    probe1.expectNext("hello-3a");

    probe1.cancel();

    final TestSubscriber.Probe<String> probe2 = sub.map(notification -> notification.getMsg())
        .runWith(TestSink.probe(system), mat).request(10);

    Source
        .from(Arrays.asList(new Notification("hello-1b"), new Notification("hello-2b"),
            new Notification("hello-3b")))
        .runWith(pub, mat);

    probe2.expectNext("hello-1b");
    probe2.expectNext("hello-2b");
    probe2.expectNext("hello-3b");
  }

  /**
   * With a subscriber buffer of 3 and no demand, overflow drops the oldest
   * buffered messages: of hello-1..10, only the already-delivered 1-2 and the
   * last 3 buffered (8, 9, 10) survive.
   */
  @Test
  public void testDropOldestWhenBufferOverflow() throws Exception {
    final Materializer mat = ActorMaterializer.create(system);
    final TopicId<Notification> topic = new TopicId<>(Notification.class, "7");
    // Shrink the buffer to 3 via a dedicated registry instance so the
    // overflow is easy to trigger.
    final Config conf = ConfigFactory.parseString("subscriber-buffer-size = 3").withFallback(
        system.settings().config().getConfig("lagom.pubsub"));
    final PubSubRegistry registry = new PubSubRegistryImpl(system, conf);
    final PubSubRef<Notification> ref = registry.refFor(topic);

    final Source<Notification, ?> src = ref.subscriber();
    // important to not use any intermediate stages (such as map) here, because then
    // internal buffering comes into play
    final TestSubscriber.Probe<Notification> probe = src.runWith(TestSink.probe(system), mat).request(2);
    awaitHasSubscribers(ref, true);

    for (int i = 1; i <= 10; i++) {
      ref.publish(new Notification("hello-" + i));
    }

    probe.expectNext(new Notification("hello-1"));
    probe.expectNext(new Notification("hello-2"));
    probe.expectNoMsg(Duration.create(1, TimeUnit.SECONDS));
    probe.request(100);
    probe.expectNext(new Notification("hello-8"));
    probe.expectNext(new Notification("hello-9"));
    probe.expectNext(new Notification("hello-10"));
    probe.expectNoMsg(Duration.create(100, TimeUnit.MILLISECONDS));
  }

}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.smartdata.hdfs.action;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.smartdata.action.ActionException;
import org.smartdata.action.Utils;
import org.smartdata.action.annotation.ActionSignature;
import org.smartdata.conf.SmartConfKeys;
import org.smartdata.hdfs.CompatibilityHelperLoader;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.Map;

/**
 * An action to copy a single file from src to destination.
 * If dest doesn't contain an "hdfs" prefix, the destination is taken to be on
 * the current cluster, i.e. a copy between dirs in the current cluster.
 * Note that destination should contain the file name.
 */
@ActionSignature(
    actionId = "copy",
    displayName = "copy",
    usage = HdfsAction.FILE_PATH + " $src " + CopyFileAction.DEST_PATH +
        " $dest " + CopyFileAction.OFFSET_INDEX + " $offset" +
        CopyFileAction.LENGTH + " $length" + CopyFileAction.BUF_SIZE + " $size"
)
public class CopyFileAction extends HdfsAction {
  private static final Logger LOG =
      LoggerFactory.getLogger(CopyFileAction.class);
  public static final String BUF_SIZE = "-bufSize";
  public static final String DEST_PATH = "-dest";
  public static final String OFFSET_INDEX = "-offset";
  public static final String LENGTH = "-length";

  private String srcPath;
  private String destPath;
  // Byte offset in src to start copying from.
  private long offset = 0;
  // Number of bytes to copy; 0 means "to the end of the file".
  private long length = 0;
  private int bufferSize = 64 * 1024;
  private Configuration conf;

  @Override
  public void init(Map<String, String> args) {
    try {
      this.conf = getContext().getConf();
      String nameNodeURL =
          this.conf.get(SmartConfKeys.SMART_DFS_NAMENODE_RPCSERVER_KEY);
      conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, nameNodeURL);
    } catch (NullPointerException e) {
      // getContext() (or its conf) is unavailable; fall back to defaults so
      // the action can still report a meaningful error instead of crashing.
      this.conf = new Configuration();
      appendLog("Conf error!, NameNode URL is not configured!");
    }
    super.init(args);
    this.srcPath = args.get(FILE_PATH);
    if (args.containsKey(DEST_PATH)) {
      this.destPath = args.get(DEST_PATH);
    }
    if (args.containsKey(BUF_SIZE)) {
      bufferSize = Integer.parseInt(args.get(BUF_SIZE));
    }
    if (args.containsKey(OFFSET_INDEX)) {
      offset = Long.parseLong(args.get(OFFSET_INDEX));
    }
    if (args.containsKey(LENGTH)) {
      length = Long.parseLong(args.get(LENGTH));
    }
  }

  @Override
  protected void execute() throws Exception {
    if (srcPath == null) {
      throw new IllegalArgumentException("File parameter is missing.");
    }
    if (destPath == null) {
      throw new IllegalArgumentException("Dest File parameter is missing.");
    }
    appendLog(
        String.format("Action starts at %s : Read %s",
            Utils.getFormatedCurrentTime(), srcPath));
    if (!dfsClient.exists(srcPath)) {
      throw new ActionException("CopyFile Action fails, file doesn't exist!");
    }
    appendLog(
        String.format("Copy from %s to %s", srcPath, destPath));
    if (offset == 0 && length == 0) {
      copySingleFile(srcPath, destPath);
    } else if (length != 0) {
      copyWithOffset(srcPath, destPath, bufferSize, offset, length);
    } else {
      // FIX: offset != 0 with length == 0 previously matched neither branch
      // and silently copied nothing; treat length == 0 as "copy from offset
      // to the end of the file".
      copyWithOffset(srcPath, destPath, bufferSize, offset,
          getFileSize(srcPath) - offset);
    }
    appendLog("Copy Successfully!!");
  }

  /** Copies the whole file (offset 0, full length). */
  private boolean copySingleFile(String src, String dest) throws IOException {
    //get The file size of source file
    long fileSize = getFileSize(src);
    appendLog(
        String.format("Copy the whole file with length %s", fileSize));
    return copyWithOffset(src, dest, bufferSize, 0, fileSize);
  }

  /**
   * Copies {@code length} bytes of {@code src} starting at {@code offset}
   * into {@code dest}, streaming through a buffer of {@code bufferSize} bytes.
   */
  private boolean copyWithOffset(String src, String dest, int bufferSize,
      long offset, long length) throws IOException {
    appendLog(
        String.format("Copy with offset %s and length %s", offset, length));
    // try-with-resources guarantees both streams close even on error
    // (replaces the previous manual finally block).
    try (InputStream in = getSrcInputStream(src);
         OutputStream out = getDestOutPutStream(dest, offset)) {
      // FIX: InputStream.skip may skip fewer bytes than requested, so a
      // single unchecked skip(offset) could silently start the copy at the
      // wrong position; loop until the full offset is consumed.
      long skipped = 0;
      while (skipped < offset) {
        long n = in.skip(offset - skipped);
        if (n <= 0) {
          throw new IOException(
              "Failed to skip to offset " + offset + " of " + src);
        }
        skipped += n;
      }
      byte[] buf = new byte[bufferSize];
      long bytesRemaining = length;
      while (bytesRemaining > 0L) {
        int bytesToRead = (int) Math.min(bytesRemaining, (long) buf.length);
        int bytesRead = in.read(buf, 0, bytesToRead);
        if (bytesRead == -1) {
          break;
        }
        out.write(buf, 0, bytesRead);
        bytesRemaining -= (long) bytesRead;
      }
      return true;
    }
  }

  /** Length of the file, via raw FileSystem for remote "hdfs" URLs, else the DFS client. */
  private long getFileSize(String fileName) throws IOException {
    if (fileName.startsWith("hdfs")) {
      // Get InputStream from URL
      FileSystem fs = FileSystem.get(URI.create(fileName), conf);
      return fs.getFileStatus(new Path(fileName)).getLen();
    } else {
      return dfsClient.getFileInfo(fileName).getLen();
    }
  }

  /** Opens the source: a remote cluster for "hdfs" URLs, else the local DFS client. */
  private InputStream getSrcInputStream(String src) throws IOException {
    if (src.startsWith("hdfs")) {
      // Copy between different remote clusters
      // Get InputStream from URL
      FileSystem fs = FileSystem.get(URI.create(src), conf);
      return fs.open(new Path(src));
    } else {
      return dfsClient.open(src);
    }
  }

  /**
   * Opens the destination stream: remote HDFS (append when resuming at an
   * offset), S3, or the current cluster via the compatibility helper.
   */
  private OutputStream getDestOutPutStream(String dest, long offset)
      throws IOException {
    if (dest.startsWith("hdfs")) {
      // Copy between different clusters
      // Copy to remote HDFS
      // Get OutPutStream from URL
      FileSystem fs = FileSystem.get(URI.create(dest), conf);
      int replication = DFSConfigKeys.DFS_REPLICATION_DEFAULT;
      try {
        replication = fs.getServerDefaults(new Path(dest)).getReplication();
        if (replication != DFSConfigKeys.DFS_REPLICATION_DEFAULT) {
          appendLog("Remote Replications =" + replication);
        }
      } catch (IOException e) {
        LOG.debug("Get Server default replication error!", e);
      }
      if (fs.exists(new Path(dest)) && offset != 0) {
        appendLog("Append to existing file " + dest);
        return fs.append(new Path(dest));
      } else {
        // FIX: create(path, true, (short) replication) bound to the
        // (Path, boolean overwrite, int bufferSize) overload, passing the
        // replication factor as the I/O buffer size. Use the
        // (Path, short replication) overload, which overwrites by default.
        return fs.create(new Path(dest), (short) replication);
      }
    } else if (dest.startsWith("s3")) {
      // Copy to s3
      FileSystem fs = FileSystem.get(URI.create(dest), conf);
      return fs.create(new Path(dest), true);
    } else {
      return CompatibilityHelperLoader.getHelper()
          .getDFSClientAppend(dfsClient, dest, bufferSize, offset);
    }
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with this * work for additional information regarding copyright ownership. The ASF * licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.apache.hadoop.hdfs.server.diskbalancer; import com.google.common.base.Preconditions; import com.google.common.base.Supplier; import org.apache.commons.codec.digest.DigestUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.ReconfigurationException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.StorageType; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSNNTopology; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.server.balancer.TestBalancer; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DiskBalancer; import org.apache.hadoop.hdfs.server.datanode.DiskBalancer.DiskBalancerMover; import org.apache.hadoop.hdfs.server.datanode.DiskBalancer.VolumePair; import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkItem; import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus; import 
org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl; import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ClusterConnector; import org.apache.hadoop.hdfs.server.diskbalancer.connectors.ConnectorFactory; import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerCluster; import org.apache.hadoop.hdfs.server.diskbalancer.datamodel.DiskBalancerDataNode; import org.apache.hadoop.hdfs.server.diskbalancer.planner.NodePlan; import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.Time; import org.junit.Assert; import org.junit.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; /** * Test Disk Balancer. 
*/
public class TestDiskBalancer {

  // Well-known pseudo-path under which a submitted plan is registered on the
  // DataNode; the DN only records it, the file is never actually created here.
  private static final String PLAN_FILE = "/system/current.plan.json";
  static final Logger LOG = LoggerFactory.getLogger(TestDiskBalancer.class);

  /**
   * Verifies that DiskBalancer can discover the cluster topology through the
   * NameNode connector and that its model of a DataNode (UUID, IP, hostname,
   * volume count) matches the real DataNode. Also verifies that querying the
   * balancer status on a DataNode that has already been shut down does not
   * throw an NPE.
   */
  @Test
  public void testDiskBalancerNameNodeConnectivity() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int numDatanodes = 2;
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(numDatanodes).build();
    try {
      cluster.waitActive();
      ClusterConnector nameNodeConnector =
          ConnectorFactory.getCluster(cluster.getFileSystem(0).getUri(), conf);

      DiskBalancerCluster diskBalancerCluster =
          new DiskBalancerCluster(nameNodeConnector);
      diskBalancerCluster.readClusterInfo();
      assertEquals(diskBalancerCluster.getNodes().size(), numDatanodes);

      // The balancer's model of the first datanode must match the real one.
      DataNode dnNode = cluster.getDataNodes().get(0);
      DiskBalancerDataNode dbDnNode =
          diskBalancerCluster.getNodeByUUID(dnNode.getDatanodeUuid());
      assertEquals(dnNode.getDatanodeUuid(), dbDnNode.getDataNodeUUID());
      assertEquals(dnNode.getDatanodeId().getIpAddr(),
          dbDnNode.getDataNodeIP());
      assertEquals(dnNode.getDatanodeId().getHostName(),
          dbDnNode.getDataNodeName());
      // FsVolumeReferences is AutoCloseable; close to release volume refs.
      try (FsDatasetSpi.FsVolumeReferences ref = dnNode.getFSDataset()
          .getFsVolumeReferences()) {
        assertEquals(ref.size(), dbDnNode.getVolumeCount());
      }

      // Shutdown the DN first, to verify that calling diskbalancer APIs on
      // uninitialized DN doesn't NPE
      dnNode.shutdown();
      assertEquals("", dnNode.getDiskBalancerStatus());
    } finally {
      cluster.shutdown();
    }
  }

  /**
   * This test simulates a real Data node working with DiskBalancer.
   * <p>
   * Here is the overview of this test.
   * <p>
   * 1. Write a bunch of blocks and move them to one disk to create imbalance.
   * 2. Rewrite the capacity of the disks in DiskBalancer Model so that planner
   * will produce a move plan.
   * 3. Execute the move plan and wait until the plan is done.
   * 4. Verify the source disk has blocks now.
   *
   * @throws Exception
   */
  @Test
  public void testDiskBalancerEndToEnd() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int blockCount = 100;
    final int blockSize = 1024;
    final int diskCount = 2;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final int sourceDiskIndex = 0;
    // Capacity is sized so two disks can hold all blocks with headroom.
    final long cap = blockSize * 2L * blockCount;

    MiniDFSCluster cluster = new ClusterBuilder()
        .setBlockCount(blockCount)
        .setBlockSize(blockSize)
        .setDiskCount(diskCount)
        .setNumDatanodes(dataNodeCount)
        .setConf(conf)
        .setCapacities(new long[] {cap, cap})
        .build();
    try {
      DataMover dataMover = new DataMover(cluster, dataNodeIndex,
          sourceDiskIndex, conf, blockSize, blockCount);
      dataMover.moveDataToSourceDisk();
      NodePlan plan = dataMover.generatePlan();
      dataMover.executePlan(plan);
      dataMover.verifyPlanExectionDone();
      dataMover.verifyAllVolumesHaveData(true);
      // Allow 10% tolerance between planned and actually moved bytes.
      dataMover.verifyTolerance(plan, 0, sourceDiskIndex, 10);
    } finally {
      cluster.shutdown();
    }
  }

  /**
   * Runs the end-to-end balancing scenario on a federated cluster with two
   * nameservices, writing one file into each namespace before balancing.
   */
  @Test
  public void testDiskBalancerWithFederatedCluster() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int blockCount = 100;
    final int blockSize = 1024;
    final int diskCount = 2;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final int sourceDiskIndex = 0;
    // Extra capacity (3x) since both namespaces write into the same disks.
    final long cap = blockSize * 3L * blockCount;

    conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, blockSize);

    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(2))
        .numDataNodes(dataNodeCount)
        .storagesPerDatanode(diskCount)
        .storageCapacities(new long[] {cap, cap})
        .build();
    cluster.waitActive();

    DFSTestUtil.setFederatedConfiguration(cluster, conf);

    final String fileName = "/tmp.txt";
    final Path filePath = new Path(fileName);
    long fileLen = blockCount * blockSize;

    // Write the same file into both nameservices (indices 0 and 1).
    FileSystem fs = cluster.getFileSystem(0);
    TestBalancer.createFile(cluster, filePath, fileLen, (short) 1, 0);
    DFSTestUtil.waitReplication(fs, filePath, (short) 1);

    fs = cluster.getFileSystem(1);
    TestBalancer.createFile(cluster, filePath, fileLen, (short) 1, 1);
    DFSTestUtil.waitReplication(fs, filePath, (short) 1);

    try {
      DataMover dataMover = new DataMover(cluster, dataNodeIndex,
          sourceDiskIndex, conf, blockSize, blockCount);
      dataMover.moveDataToSourceDisk();
      NodePlan plan = dataMover.generatePlan();
      dataMover.executePlan(plan);
      dataMover.verifyPlanExectionDone();
      dataMover.verifyAllVolumesHaveData(true);
      dataMover.verifyTolerance(plan, 0, sourceDiskIndex, 10);
    } finally {
      cluster.shutdown();
    }
  }

  /**
   * Verifies the throttling delay computed by DiskBalancerMover for a given
   * bandwidth cap: with 20MB copied in 1200ms at 10MB/s the mover should
   * sleep a further 800ms.
   */
  @Test
  public void testDiskBalancerComputeDelay() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int blockCount = 100;
    final int blockSize = 11 * 1024 * 1024;
    final int diskCount = 2;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final long cap = blockSize * 2L * blockCount;

    conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, blockSize);

    final MiniDFSCluster cluster = new ClusterBuilder()
        .setBlockCount(blockCount).setBlockSize(blockSize)
        .setDiskCount(diskCount).setNumDatanodes(dataNodeCount).setConf(conf)
        .setCapacities(new long[] {cap, cap}).build();

    try {
      DataNode node = cluster.getDataNodes().get(dataNodeIndex);
      final FsDatasetSpi<?> fsDatasetSpy = Mockito.spy(node.getFSDataset());
      DiskBalancerWorkItem item = Mockito.spy(new DiskBalancerWorkItem());
      // Mocking bandwidth as 10mb/sec.
      Mockito.doReturn((long) 10).when(item).getBandwidth();

      // Delegate block moves on the spy to the real dataset so the mover
      // behaves normally while we intercept its calls.
      doAnswer(new Answer<Object>() {
        public Object answer(InvocationOnMock invocation) {
          try {
            node.getFSDataset().moveBlockAcrossVolumes(
                (ExtendedBlock) invocation.getArguments()[0],
                (FsVolumeSpi) invocation.getArguments()[1]);
          } catch (Exception e) {
            LOG.error(e.getMessage());
          }
          return null;
        }
      }).when(fsDatasetSpy).moveBlockAcrossVolumes(any(ExtendedBlock.class),
          any(FsVolumeSpi.class));

      DiskBalancerMover diskBalancerMover = new DiskBalancerMover(fsDatasetSpy,
          conf);
      diskBalancerMover.setRunnable();

      // bytesCopied - 20 * 1024 *1024 byteCopied.
      // timeUsed - 1200 in milliseconds
      // item - set DiskBalancerWorkItem bandwidth as 10
      // Expect return sleep delay in Milliseconds. sleep value = bytesCopied /
      // (1024*1024*bandwidth in MB/milli) - timeUsed;
      long val = diskBalancerMover.computeDelay(20 * 1024 * 1024, 1200, item);
      Assert.assertEquals(val, (long) 800);
    } catch (Exception e) {
      Assert.fail("Unexpected exception: " + e);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Federated-cluster variant where only one of the two nameservices holds
   * data: balancing must still complete, and the mover must log that no valid
   * block could be copied for the empty block pool.
   */
  @Test
  public void testDiskBalancerWithFedClusterWithOneNameServiceEmpty() throws
      Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int blockCount = 100;
    final int blockSize = 1024;
    final int diskCount = 2;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final int sourceDiskIndex = 0;
    final long cap = blockSize * 3L * blockCount;

    conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
    conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, blockSize);

    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
        .nnTopology(MiniDFSNNTopology.simpleFederatedTopology(2))
        .numDataNodes(dataNodeCount)
        .storagesPerDatanode(diskCount)
        .storageCapacities(new long[] {cap, cap})
        .build();
    cluster.waitActive();

    DFSTestUtil.setFederatedConfiguration(cluster, conf);

    final String fileName = "/tmp.txt";
    final Path filePath = new Path(fileName);
    long fileLen = blockCount * blockSize;

    //Writing data only to one nameservice.
    FileSystem fs = cluster.getFileSystem(0);
    TestBalancer.createFile(cluster, filePath, fileLen, (short) 1, 0);
    DFSTestUtil.waitReplication(fs, filePath, (short) 1);

    // Capture DiskBalancer logs so we can assert on the "no valid block"
    // message emitted for the empty block pool.
    GenericTestUtils.LogCapturer logCapturer = GenericTestUtils.LogCapturer
        .captureLogs(DiskBalancer.LOG);
    try {
      DataMover dataMover = new DataMover(cluster, dataNodeIndex,
          sourceDiskIndex, conf, blockSize, blockCount);
      dataMover.moveDataToSourceDisk();
      NodePlan plan = dataMover.generatePlan();
      dataMover.executePlan(plan);
      dataMover.verifyPlanExectionDone();
      // Because here we have one nameservice empty, don't check blockPoolCount.
      dataMover.verifyAllVolumesHaveData(false);
    } finally {
      String logOut = logCapturer.getOutput();
      Assert.assertTrue("Wrong log: " + logOut, logOut.contains(
          "NextBlock call returned null. No valid block to copy."));
      cluster.shutdown();
    }
  }

  /**
   * Verifies that a single over-full source disk produces one plan step per
   * destination disk (two steps for three disks) and that executing the plan
   * spreads data across all volumes.
   */
  @Test
  public void testBalanceDataBetweenMultiplePairsOfVolumes()
      throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
    final int blockCount = 1000;
    final int blockSize = 1024;

    // create 3 disks, that means we will have 2 plans
    // Move Data from disk0->disk1 and disk0->disk2.
    final int diskCount = 3;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final int sourceDiskIndex = 0;
    final long cap = blockSize * 2L * blockCount;

    MiniDFSCluster cluster = new ClusterBuilder()
        .setBlockCount(blockCount)
        .setBlockSize(blockSize)
        .setDiskCount(diskCount)
        .setNumDatanodes(dataNodeCount)
        .setConf(conf)
        .setCapacities(new long[] {cap, cap, cap})
        .build();

    try {
      DataMover dataMover = new DataMover(cluster, dataNodeIndex,
          sourceDiskIndex, conf, blockSize, blockCount);
      dataMover.moveDataToSourceDisk();
      NodePlan plan = dataMover.generatePlan();

      // 3 disks , The plan should move data both disks,
      // so we must have 2 plan steps.
      assertEquals(plan.getVolumeSetPlans().size(), 2);

      dataMover.executePlan(plan);
      dataMover.verifyPlanExectionDone();
      dataMover.verifyAllVolumesHaveData(true);
      dataMover.verifyTolerance(plan, 0, sourceDiskIndex, 10);
    } finally {
      cluster.shutdown();
    }
  }

  /**
   * Test disk balancer behavior when one of the disks involved
   * in balancing operation is removed after submitting the plan.
   * @throws Exception
   */
  @Test
  public void testDiskBalancerWhenRemovingVolumes() throws Exception {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);

    final int blockCount = 100;
    final int blockSize = 1024;
    final int diskCount = 2;
    final int dataNodeCount = 1;
    final int dataNodeIndex = 0;
    final int sourceDiskIndex = 0;
    final long cap = blockSize * 2L * blockCount;

    MiniDFSCluster cluster = new ClusterBuilder()
        .setBlockCount(blockCount)
        .setBlockSize(blockSize)
        .setDiskCount(diskCount)
        .setNumDatanodes(dataNodeCount)
        .setConf(conf)
        .setCapacities(new long[] {cap, cap})
        .build();

    try {
      DataMover dataMover = new DataMover(cluster, dataNodeIndex,
          sourceDiskIndex, conf, blockSize, blockCount);
      dataMover.moveDataToSourceDisk();
      NodePlan plan = dataMover.generatePlan();
      // Executes the plan while a reconfiguration removes a data dir mid-copy.
      dataMover.executePlanDuringDiskRemove(plan);
      dataMover.verifyAllVolumesHaveData(true);
      dataMover.verifyTolerance(plan, 0, sourceDiskIndex, 10);
    } catch (Exception e) {
      Assert.fail("Unexpected exception: " + e);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Helper class that allows us to create different kinds of MiniDFSClusters
   * and populate data.
   */
  static class ClusterBuilder {
    private Configuration conf;
    private int blockSize;
    private int numDatanodes;
    private int fileLen;       // derived: blockCount * blockSize
    private int blockCount;
    private int diskCount;
    private long[] capacities; // per-disk capacities, one entry per disk

    public ClusterBuilder setConf(Configuration conf) {
      this.conf = conf;
      return this;
    }

    public ClusterBuilder setBlockSize(int blockSize) {
      this.blockSize = blockSize;
      return this;
    }

    public ClusterBuilder setNumDatanodes(int datanodeCount) {
      this.numDatanodes = datanodeCount;
      return this;
    }

    public ClusterBuilder setBlockCount(int blockCount) {
      this.blockCount = blockCount;
      return this;
    }

    public ClusterBuilder setDiskCount(int diskCount) {
      this.diskCount = diskCount;
      return this;
    }

    private ClusterBuilder setCapacities(final long[] caps) {
      this.capacities = caps;
      return this;
    }

    // Returns a StorageType array of the requested length, all DISK.
    private StorageType[] getStorageTypes(int diskCount) {
      Preconditions.checkState(diskCount > 0);
      StorageType[] array = new StorageType[diskCount];
      for (int x = 0; x < diskCount; x++) {
        array[x] = StorageType.DISK;
      }
      return array;
    }

    /**
     * Builds the MiniDFSCluster, writes the test file and restarts the
     * datanodes so the written blocks are picked up from disk.
     */
    public MiniDFSCluster build() throws IOException, TimeoutException,
        InterruptedException {
      Preconditions.checkNotNull(this.conf);
      Preconditions.checkState(blockSize > 0);
      Preconditions.checkState(numDatanodes > 0);
      fileLen = blockCount * blockSize;
      Preconditions.checkState(fileLen > 0);
      conf.setBoolean(DFSConfigKeys.DFS_DISK_BALANCER_ENABLED, true);
      conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, blockSize);
      conf.setInt(DFSConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY, blockSize);
      // Fast heartbeats keep the test responsive.
      conf.setLong(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1L);

      final String fileName = "/tmp.txt";
      Path filePath = new Path(fileName);
      fileLen = blockCount * blockSize;

      // Write a file and restart the cluster
      MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
          .numDataNodes(numDatanodes)
          .storageCapacities(capacities)
          .storageTypes(getStorageTypes(diskCount))
          .storagesPerDatanode(diskCount)
          .build();
      generateData(filePath, cluster);
      cluster.restartDataNodes();
      cluster.waitActive();
      return cluster;
    }

    // Writes fileLen bytes at the given path and restarts the datanodes.
    private void generateData(Path filePath, MiniDFSCluster cluster)
        throws IOException, InterruptedException, TimeoutException {
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem(0);
      TestBalancer.createFile(cluster, filePath, fileLen, (short) 1,
          numDatanodes - 1);
      DFSTestUtil.waitReplication(fs, filePath, (short) 1);
      cluster.restartDataNodes();
      cluster.waitActive();
    }
  }

  /**
   * Drives a balancing run against one datanode: creates an imbalance,
   * generates and executes a plan, and verifies the outcome.
   */
  class DataMover {
    private final MiniDFSCluster cluster;
    private final int sourceDiskIndex;
    private final int dataNodeIndex;
    private final Configuration conf;
    private final int blockCount;
    private final int blockSize;
    // Re-fetched after each datanode restart, hence not final.
    private DataNode node;

    /**
     * Constructs a DataMover class.
     *
     * @param cluster - MiniDFSCluster.
     * @param dataNodeIndex - Datanode to operate against.
     * @param sourceDiskIndex - source Disk Index.
     */
    public DataMover(MiniDFSCluster cluster, int dataNodeIndex,
                     int sourceDiskIndex, Configuration conf, int blockSize,
                     int blockCount) {
      this.cluster = cluster;
      this.dataNodeIndex = dataNodeIndex;
      this.node = cluster.getDataNodes().get(dataNodeIndex);
      this.sourceDiskIndex = sourceDiskIndex;
      this.conf = conf;
      this.blockCount = blockCount;
      this.blockSize = blockSize;
    }

    /**
     * Moves all data to a source disk to create disk imbalance so we can run a
     * planner.
     *
     * @throws IOException
     */
    public void moveDataToSourceDisk() throws IOException {
      moveAllDataToDestDisk(this.node, sourceDiskIndex);
      cluster.restartDataNodes();
      cluster.waitActive();
    }

    /**
     * Moves all data in the data node to one disk.
     *
     * @param dataNode - Datanode
     * @param destDiskindex - Index of the destination disk.
     */
    private void moveAllDataToDestDisk(DataNode dataNode, int destDiskindex)
        throws IOException {
      Preconditions.checkNotNull(dataNode);
      Preconditions.checkState(destDiskindex >= 0);
      try (FsDatasetSpi.FsVolumeReferences refs =
               dataNode.getFSDataset().getFsVolumeReferences()) {
        if (refs.size() <= destDiskindex) {
          throw new IllegalArgumentException("Invalid Disk index.");
        }
        FsVolumeImpl dest = (FsVolumeImpl) refs.get(destDiskindex);
        // Drain every other volume into the destination volume.
        for (int x = 0; x < refs.size(); x++) {
          if (x == destDiskindex) {
            continue;
          }
          FsVolumeImpl source = (FsVolumeImpl) refs.get(x);
          DiskBalancerTestUtil.moveAllDataToDestVolume(dataNode.getFSDataset(),
              source, dest);
        }
      }
    }

    /**
     * Generates a NodePlan for the datanode specified.
     *
     * @return NodePlan.
     */
    public NodePlan generatePlan() throws Exception {

      // Start up a disk balancer and read the cluster info.
      node = cluster.getDataNodes().get(dataNodeIndex);
      ClusterConnector nameNodeConnector =
          ConnectorFactory.getCluster(cluster.getFileSystem(dataNodeIndex)
              .getUri(), conf);

      DiskBalancerCluster diskBalancerCluster =
          new DiskBalancerCluster(nameNodeConnector);
      diskBalancerCluster.readClusterInfo();
      List<DiskBalancerDataNode> nodesToProcess = new LinkedList<>();

      // Pick a node to process.
      nodesToProcess.add(diskBalancerCluster.getNodeByUUID(
          node.getDatanodeUuid()));
      diskBalancerCluster.setNodesToProcess(nodesToProcess);

      // Compute a plan.
      List<NodePlan> clusterplan = diskBalancerCluster.computePlan(0.0f);

      // Now we must have a plan, since the node is imbalanced and we
      // asked the disk balancer to create a plan.
      assertTrue(clusterplan.size() == 1);

      NodePlan plan = clusterplan.get(0);
      plan.setNodeUUID(node.getDatanodeUuid());
      plan.setTimeStamp(Time.now());

      assertNotNull(plan.getVolumeSetPlans());
      assertTrue(plan.getVolumeSetPlans().size() > 0);
      plan.getVolumeSetPlans().get(0).setTolerancePercent(10);
      return plan;
    }

    /**
     * Waits for a plan executing to finish.
     */
    public void executePlan(NodePlan plan) throws
        IOException, TimeoutException, InterruptedException {

      node = cluster.getDataNodes().get(dataNodeIndex);
      String planJson = plan.toJson();
      String planID = DigestUtils.shaHex(planJson);

      // Submit the plan and wait till the execution is done.
      node.submitDiskBalancerPlan(planID, 1, PLAN_FILE, planJson, false);
      String jmxString = node.getDiskBalancerStatus();
      assertNotNull(jmxString);
      // The JMX-exposed status must agree with the RPC-queried status.
      DiskBalancerWorkStatus status =
          DiskBalancerWorkStatus.parseJson(jmxString);
      DiskBalancerWorkStatus realStatus = node.queryDiskBalancerPlan();
      assertEquals(realStatus.getPlanID(), status.getPlanID());

      // Poll every second, up to 100s, until the plan reports PLAN_DONE.
      GenericTestUtils.waitFor(new Supplier<Boolean>() {
        @Override
        public Boolean get() {
          try {
            return node.queryDiskBalancerPlan().getResult() ==
                DiskBalancerWorkStatus.Result.PLAN_DONE;
          } catch (IOException ex) {
            return false;
          }
        }
      }, 1000, 100000);
    }

    /**
     * Executes the plan while concurrently removing a data directory via DN
     * reconfiguration. Two latches sequence the race: copyBlocks is paused
     * until the disk removal has happened, then resumed; the error count on
     * block moves must stay below the balancer's max-disk-errors default.
     */
    public void executePlanDuringDiskRemove(NodePlan plan) throws
        IOException, TimeoutException, InterruptedException {
      CountDownLatch createWorkPlanLatch = new CountDownLatch(1);
      CountDownLatch removeDiskLatch = new CountDownLatch(1);
      AtomicInteger errorCount = new AtomicInteger(0);

      LOG.info("FSDataSet: " + node.getFSDataset());
      final FsDatasetSpi<?> fsDatasetSpy = Mockito.spy(node.getFSDataset());
      // Count (rather than propagate) move failures caused by the removal.
      doAnswer(new Answer<Object>() {
        public Object answer(InvocationOnMock invocation) {
          try {
            node.getFSDataset().moveBlockAcrossVolumes(
                (ExtendedBlock)invocation.getArguments()[0],
                (FsVolumeSpi) invocation.getArguments()[1]);
          } catch (Exception e) {
            errorCount.incrementAndGet();
          }
          return null;
        }
      }).when(fsDatasetSpy).moveBlockAcrossVolumes(
          any(ExtendedBlock.class), any(FsVolumeSpi.class));

      DiskBalancerMover diskBalancerMover = new DiskBalancerMover(
          fsDatasetSpy, conf);
      diskBalancerMover.setRunnable();
      DiskBalancerMover diskBalancerMoverSpy = Mockito.spy(diskBalancerMover);
      // Pause copyBlocks until the reconfiguration thread has removed a disk.
      doAnswer(new Answer<Object>() {
        public Object answer(InvocationOnMock invocation) {
          createWorkPlanLatch.countDown();
          LOG.info("Waiting for the disk removal!");
          try {
            removeDiskLatch.await();
          } catch (InterruptedException e) {
            LOG.info("Encountered " + e);
          }
          LOG.info("Got disk removal notification, resuming copyBlocks!");
          diskBalancerMover.copyBlocks((VolumePair)(invocation
              .getArguments()[0]), (DiskBalancerWorkItem)(invocation
              .getArguments()[1]));
          return null;
        }
      }).when(diskBalancerMoverSpy).copyBlocks(
          any(VolumePair.class), any(DiskBalancerWorkItem.class));

      DiskBalancer diskBalancer = new DiskBalancer(node.getDatanodeUuid(),
          conf, diskBalancerMoverSpy);

      // Reconfigure the DN down to its first data dir only, i.e. remove the
      // other disk(s) from service.
      List<String> oldDirs = new ArrayList<String>(node.getConf().
          getTrimmedStringCollection(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY));
      final String newDirs = oldDirs.get(0);
      LOG.info("Reconfigure newDirs:" + newDirs);
      Thread reconfigThread = new Thread() {
        public void run() {
          try {
            LOG.info("Waiting for work plan creation!");
            createWorkPlanLatch.await();
            LOG.info("Work plan created. Removing disk!");
            assertThat(
                "DN did not update its own config", node.
                    reconfigurePropertyImpl(DFS_DATANODE_DATA_DIR_KEY, newDirs),
                is(node.getConf().get(DFS_DATANODE_DATA_DIR_KEY)));
            Thread.sleep(1000);
            LOG.info("Removed disk!");
            removeDiskLatch.countDown();
          } catch (ReconfigurationException | InterruptedException e) {
            Assert.fail("Unexpected error while reconfiguring: " + e);
          }
        }
      };
      reconfigThread.start();

      String planJson = plan.toJson();
      String planID = DigestUtils.shaHex(planJson);
      diskBalancer.submitPlan(planID, 1, PLAN_FILE, planJson, false);
      GenericTestUtils.waitFor(new Supplier<Boolean>() {
        @Override
        public Boolean get() {
          try {
            LOG.info("Work Status: " + diskBalancer.
                queryWorkStatus().toJsonString());
            Result result = diskBalancer.queryWorkStatus().getResult();
            return (result == Result.PLAN_DONE);
          } catch (IOException e) {
            return false;
          }
        }
      }, 1000, 100000);

      assertTrue("Disk balancer operation hit max errors!", errorCount.get() <
          DFSConfigKeys.DFS_DISK_BALANCER_MAX_DISK_ERRORS_DEFAULT);
      createWorkPlanLatch.await();
      removeDiskLatch.await();
    }

    /**
     * Verifies the Plan Execution has been done.
     */
    public void verifyPlanExectionDone() throws IOException {
      node = cluster.getDataNodes().get(dataNodeIndex);
      assertEquals(node.queryDiskBalancerPlan().getResult(),
          DiskBalancerWorkStatus.Result.PLAN_DONE);
    }

    /**
     * Once diskBalancer is run, all volumes must have some data.
     */
    public void verifyAllVolumesHaveData(boolean checkblockPoolCount)
        throws IOException {
      node = cluster.getDataNodes().get(dataNodeIndex);
      try (FsDatasetSpi.FsVolumeReferences refs =
               node.getFSDataset().getFsVolumeReferences()) {
        for (FsVolumeSpi volume : refs) {
          assertTrue(DiskBalancerTestUtil.getBlockCount(volume,
              checkblockPoolCount) > 0);
          LOG.info("{} : Block Count : {}", refs, DiskBalancerTestUtil
              .getBlockCount(volume, checkblockPoolCount));
        }
      }
    }

    /**
     * Verifies that tolerance values are honored correctly.
     */
    public void verifyTolerance(NodePlan plan, int planIndex,
        int sourceDiskIndex, int tolerance) throws IOException {
      // Tolerance
      long delta = (plan.getVolumeSetPlans().get(planIndex).getBytesToMove()
          * tolerance) / 100;
      FsVolumeImpl volume = null;
      try (FsDatasetSpi.FsVolumeReferences refs =
               node.getFSDataset().getFsVolumeReferences()) {
        volume = (FsVolumeImpl) refs.get(sourceDiskIndex);
        assertTrue(DiskBalancerTestUtil.getBlockCount(volume, true) > 0);

        // Remaining blocks on the source disk, scaled by block size plus the
        // tolerance delta, must cover the bytes the plan intended to move.
        assertTrue((DiskBalancerTestUtil.getBlockCount(volume, true) *
            (blockSize + delta)) >= plan.getVolumeSetPlans().get(0)
            .getBytesToMove());
      }
    }
  }
}
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.codeInsight.hint.api.impls; import com.intellij.codeInsight.AnnotationTargetUtil; import com.intellij.codeInsight.AnnotationUtil; import com.intellij.codeInsight.CodeInsightBundle; import com.intellij.codeInsight.CodeInsightSettings; import com.intellij.codeInsight.completion.CompletionMemory; import com.intellij.codeInsight.completion.JavaCompletionUtil; import com.intellij.codeInsight.completion.JavaMethodCallElement; import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer; import com.intellij.codeInsight.daemon.impl.ParameterHintsPresentationManager; import com.intellij.codeInsight.hint.ParameterInfoController; import com.intellij.codeInsight.hints.ParameterHintsPass; import com.intellij.codeInsight.javadoc.JavaDocInfoGenerator; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.injected.editor.EditorWindow; import com.intellij.lang.parameterInfo.*; import com.intellij.openapi.command.WriteCommandAction; import com.intellij.openapi.editor.*; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Computable; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.UserDataHolder; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.*; import com.intellij.psi.impl.PsiImplUtil; import com.intellij.psi.impl.source.resolve.CompletionParameterTypeInferencePolicy; import com.intellij.psi.infos.CandidateInfo; import com.intellij.psi.infos.MethodCandidateInfo; import com.intellij.psi.scope.MethodProcessorSetupFailedException; import com.intellij.psi.scope.PsiConflictResolver; import com.intellij.psi.scope.processor.MethodCandidatesProcessor; 
import com.intellij.psi.scope.processor.MethodResolverProcessor; import com.intellij.psi.scope.util.PsiScopesUtil; import com.intellij.psi.tree.IElementType; import com.intellij.psi.util.MethodSignatureUtil; import com.intellij.util.DocumentUtil; import com.intellij.util.ObjectUtils; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.text.CharArrayUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.util.*; import java.util.List; /** * @author Maxim.Mossienko */ public class MethodParameterInfoHandler implements ParameterInfoHandlerWithTabActionSupport<PsiExpressionList, Object, PsiExpression>, DumbAware { private static final Set<Class> ourArgumentListAllowedParentClassesSet = ContainerUtil.newHashSet( PsiMethodCallExpression.class, PsiNewExpression.class, PsiAnonymousClass.class, PsiEnumConstant.class); private static final Set<Class> ourStopSearch = Collections.singleton(PsiMethod.class); private static final String WHITESPACE_OR_LINE_BREAKS = " \t\n"; private static final Key<Inlay> CURRENT_HINT = Key.create("current.hint"); private static final Key<List<Inlay>> HIGHLIGHTED_HINTS = Key.create("highlighted.hints"); @Override public Object[] getParametersForLookup(LookupElement item, ParameterInfoContext context) { final List<? extends PsiElement> elements = JavaCompletionUtil.getAllPsiElements(item); return elements != null && !elements.isEmpty() && elements.get(0) instanceof PsiMethod ? 
elements.toArray() : null; } @Override public boolean couldShowInLookup() { return true; } @Override @Nullable public PsiExpressionList findElementForParameterInfo(@NotNull final CreateParameterInfoContext context) { PsiExpressionList argumentList = findArgumentList(context.getFile(), context.getOffset(), context.getParameterListStart(), true); if (argumentList != null) { return findMethodsForArgumentList(context, argumentList); } return null; } private PsiExpressionList findArgumentList(final PsiFile file, int offset, int parameterStart, boolean allowOuter) { PsiExpressionList argumentList = ParameterInfoUtils.findArgumentList(file, offset, parameterStart, this, allowOuter); if (argumentList == null && allowOuter) { PsiCall call = ParameterInfoUtils.findParentOfTypeWithStopElements(file, offset, PsiMethodCallExpression.class, PsiMethod.class); if (call == null) { call = ParameterInfoUtils.findParentOfTypeWithStopElements(file, offset, PsiNewExpression.class, PsiMethod.class); } if (call != null) { argumentList = call.getArgumentList(); } } return argumentList; } private static PsiExpressionList findMethodsForArgumentList(final CreateParameterInfoContext context, @NotNull final PsiExpressionList argumentList) { CandidateInfo[] candidates = getMethods(argumentList); if (candidates.length == 0) { DaemonCodeAnalyzer.getInstance(context.getProject()).updateVisibleHighlighters(context.getEditor()); return null; } context.setItemsToShow(candidates); return argumentList; } @Override public void showParameterInfo(@NotNull final PsiExpressionList element, @NotNull final CreateParameterInfoContext context) { int offset = element.getTextRange().getStartOffset(); if (CodeInsightSettings.getInstance().SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION) { ParameterInfoController controller = ParameterInfoController.findControllerAtOffset(context.getEditor(), offset); PsiElement parent = element.getParent(); if (parent instanceof PsiCall && controller != null && 
controller.isHintShown(false)) { Object highlighted = controller.getHighlighted(); Object[] objects = controller.getObjects(); if (objects != null && objects.length > 0 && (highlighted != null || objects.length == 1)) { PsiCall methodCall = (PsiCall)parent; JavaMethodCallElement.setCompletionModeIfNotSet(methodCall, controller); PsiMethod targetMethod = (PsiMethod)((CandidateInfo)(highlighted == null ? objects[0] : highlighted)).getElement(); CompletionMemory.registerChosenMethod(targetMethod, methodCall); controller.setPreservedOnHintHidden(true); ParameterHintsPass.syncUpdate(methodCall, context.getEditor()); } } } context.showHint(element, offset, this); } @Override public PsiExpressionList findElementForUpdatingParameterInfo(@NotNull final UpdateParameterInfoContext context) { if (context.isPreservedOnHintHidden() && isOutsideOfCompletedInvocation(context)) { context.setPreservedOnHintHidden(false); return null; } PsiExpressionList expressionList = findArgumentList(context.getFile(), context.getOffset(), context.getParameterListStart(), false); if (expressionList != null) { Object[] candidates = context.getObjectsToView(); if (candidates != null && candidates.length != 0) { Object currentMethodInfo = context.getHighlightedParameter(); if (currentMethodInfo == null) currentMethodInfo = candidates[0]; PsiElement element = currentMethodInfo instanceof CandidateInfo ? ((CandidateInfo)currentMethodInfo).getElement() : currentMethodInfo instanceof PsiElement ? 
(PsiElement) currentMethodInfo : null; if ((element instanceof PsiMethod)) { PsiMethod method = (PsiMethod)element; PsiElement parent = expressionList.getParent(); String originalMethodName = method.getName(); PsiQualifiedReference currentMethodReference = null; if (parent instanceof PsiMethodCallExpression && !method.isConstructor()) { currentMethodReference = ((PsiMethodCallExpression)parent).getMethodExpression(); } else if (parent instanceof PsiNewExpression) { currentMethodReference = ((PsiNewExpression)parent).getClassReference(); } else if (parent instanceof PsiAnonymousClass) { currentMethodReference = ((PsiAnonymousClass)parent).getBaseClassReference(); } if (currentMethodReference == null || originalMethodName.equals(currentMethodReference.getReferenceName())) { int currentNumberOfParameters = expressionList.getExpressionCount(); PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(context.getProject()); Document document = psiDocumentManager.getCachedDocument(context.getFile()); if (parent instanceof PsiCallExpression && JavaMethodCallElement.isCompletionMode((PsiCall)parent)) { PsiMethod chosenMethod = CompletionMemory.getChosenMethod((PsiCall)parent); if ((context.getHighlightedParameter() != null || candidates.length == 1) && chosenMethod != null && document != null && psiDocumentManager.isCommitted(document) && isIncompatibleParameterCount(chosenMethod, currentNumberOfParameters)) { JavaMethodCallElement.setCompletionMode((PsiCall)parent, false); ParameterHintsPass.syncUpdate(parent, context.getEditor()); // make sure the statement above takes effect highlightHints(context.getEditor(), null, -1, context.getCustomContext()); } else { int index = ParameterInfoUtils.getCurrentParameterIndex(expressionList.getNode(), context.getOffset(), JavaTokenType.COMMA); TextRange textRange = expressionList.getTextRange(); if (context.getOffset() <= textRange.getStartOffset() || context.getOffset() >= textRange.getEndOffset()) index = -1; 
highlightHints(context.getEditor(), expressionList, context.isInnermostContext() ? index : -1, context.getCustomContext()); } } return expressionList; } } } } highlightHints(context.getEditor(), null, -1, context.getCustomContext()); return null; } private static boolean isOutsideOfCompletedInvocation(UpdateParameterInfoContext context) { PsiElement owner = context.getParameterOwner(); if (owner != null && owner.isValid()) { TextRange ownerTextRange = getRelatedRange(owner, context.getEditor()); int caretOffset = context.getOffset(); if (ownerTextRange != null) { if (caretOffset >= ownerTextRange.getStartOffset() && caretOffset <= ownerTextRange.getEndOffset()) { return false; } else { for (PsiElement element : owner.getChildren()) { if (element instanceof PsiErrorElement) return false; } if (owner instanceof PsiExpressionList && ((PsiExpressionList)owner).isEmpty()) { PsiElement parent = owner.getParent(); if (parent instanceof PsiCall) { PsiMethod chosenMethod = CompletionMemory.getChosenMethod((PsiCall)parent); if (chosenMethod != null) { int parametersCount = chosenMethod.getParameterList().getParametersCount(); if ((parametersCount == 1 && !chosenMethod.isVarArgs() || parametersCount == 2 && chosenMethod.isVarArgs()) && !overloadWithNoParametersExists(chosenMethod, context.getObjectsToView())) return false; } } } } } } return true; } private static TextRange getRelatedRange(PsiElement owner, Editor editor) { TextRange range = owner.getTextRange(); if (range == null) return null; Document document = editor.getDocument(); if (Registry.is("editor.keep.completion.hints.even.longer")) { int startY = editor.visualPositionToXY(editor.offsetToVisualPosition(range.getStartOffset())).y; int endY = editor.visualPositionToXY(editor.offsetToVisualPosition(range.getEndOffset())).y; Rectangle visibleArea = editor.getScrollingModel().getVisibleArea(); return startY > visibleArea.getMaxY() || endY < visibleArea.getMinY() ? 
// ---- tail of a method whose start lies before this chunk; code unchanged, comments only ----
null : new TextRange(0, document.getTextLength());
  }
  if (!Registry.is("editor.keep.completion.hints.longer")) return range;
  // Widen the range to whole lines so completion hints survive a bit longer while editing.
  return new TextRange(DocumentUtil.getLineStartOffset(range.getStartOffset(), document),
                       DocumentUtil.getLineEndOffset(range.getEndOffset(), document));
}

/**
 * Returns true if {@code candidates} contains an overload of {@code method}
 * (same name) that declares no parameters.
 */
private static boolean overloadWithNoParametersExists(PsiMethod method, Object[] candidates) {
  String methodName = method.getName();
  return ContainerUtil.find(candidates, c -> {
    if (!(c instanceof CandidateInfo)) return false;
    PsiElement e = ((CandidateInfo)c).getElement();
    if (!(e instanceof PsiMethod)) return false;
    PsiMethod m = (PsiMethod)e;
    return m.getParameterList().isEmpty() && m.getName().equals(methodName);
  }) != null;
}

/**
 * Decides whether the argument count currently typed ({@code numberOfParameters}) cannot
 * correspond to {@code method}, taking the completion-hints limit and the
 * "virtual comma" registry mode into account.
 */
private static boolean isIncompatibleParameterCount(@NotNull PsiMethod method, int numberOfParameters) {
  int limit = JavaMethodCallElement.getCompletionHintsLimit();
  int originalNumberOfParameters = method.getParameterList().getParametersCount();
  return Registry.is("editor.completion.hints.virtual.comma")
         // virtual-comma mode: only "too many args for a non-varargs method" is incompatible
         ? !PsiImplUtil.isVarArgs(method) && numberOfParameters > originalNumberOfParameters
         : PsiImplUtil.isVarArgs(method)
           ? originalNumberOfParameters > 2 &&
             numberOfParameters < Math.min(limit, originalNumberOfParameters) - 1 &&
             !(limit == 1 && numberOfParameters == 0)
           : (originalNumberOfParameters < numberOfParameters ||
              numberOfParameters < Math.min(limit, originalNumberOfParameters)) &&
             !(Math.min(limit, originalNumberOfParameters) == 1 && numberOfParameters == 0);
}

/**
 * Recomputes the current parameter index for the argument list {@code o}, enables/disables
 * each overload candidate shown in the popup, and picks the candidate to highlight.
 */
@Override
public void updateParameterInfo(@NotNull final PsiExpressionList o, @NotNull final UpdateParameterInfoContext context) {
  PsiElement parameterOwner = context.getParameterOwner();
  if (parameterOwner != o) {
    context.removeHint();
    return;
  }
  int offset = context.getOffset();
  TextRange elRange = o.getTextRange();
  // -1 when the caret is outside the parentheses of the argument list
  int index = offset <= elRange.getStartOffset() || offset >= elRange.getEndOffset() ?
              -1 : ParameterInfoUtils.getCurrentParameterIndex(o.getNode(), offset, JavaTokenType.COMMA);
  context.setCurrentParameter(index);
  Object[] candidates = context.getObjectsToView();
  PsiExpression[] args = o.getExpressions();
  PsiCall call = getCall(o);
  PsiElement realResolve = call != null ? call.resolveMethod() : null;
  // the overload the user picked in the completion popup, if any
  PsiMethod chosenMethod = CompletionMemory.getChosenMethod(call);
  CandidateInfo chosenInfo = null;
  CandidateInfo completeMatch = null;
  for (int i = 0; i < candidates.length; i++) {
    CandidateInfo candidate = (CandidateInfo)candidates[i];
    PsiMethod method = (PsiMethod)candidate.getElement();
    if (!method.isValid()) continue;
    if (candidate instanceof MethodCandidateInfo && !((MethodCandidateInfo)candidate).getSiteSubstitutor().isValid()) continue;
    PsiSubstitutor substitutor = getCandidateInfoSubstitutor(o, candidate, method == realResolve);
    assert substitutor != null;
    if (!method.isValid() || !substitutor.isValid()) {
      // may happen e.g. when editing a method call in a field initializer:
      // candidates in the same file get invalidated
      context.setUIComponentEnabled(i, false);
      continue;
    }
    PsiParameter[] parms = method.getParameterList().getParameters();
    boolean enabled = true;
    if (parms.length <= index) {
      // caret is past the declared parameter count — only a varargs overload can still match
      if (parms.length > 0) {
        if (method.isVarArgs()) {
          // the fixed leading parameters must each accept their argument
          for (int j = 0; j < parms.length - 1; j++) {
            PsiType parmType = substitutor.substitute(parms[j].getType());
            PsiType argType = args[j].getType();
            if (argType != null && !parmType.isAssignableFrom(argType)) {
              enabled = false;
              break;
            }
          }
          if (enabled) {
            PsiArrayType lastParmType = (PsiArrayType)substitutor.substitute(parms[parms.length - 1].getType());
            PsiType componentType = lastParmType.getComponentType();
            if (parms.length == args.length) {
              // the single trailing argument may be the array itself or one element of it
              PsiType lastArgType = args[args.length - 1].getType();
              if (lastArgType != null && !lastParmType.isAssignableFrom(lastArgType) &&
                  !componentType.isAssignableFrom(lastArgType)) {
                enabled = false;
              }
            }
            else {
              // every extra argument up to the caret must fit the varargs component type
              for (int j = parms.length; j <= index && j < args.length; j++) {
                PsiExpression arg = args[j];
                PsiType argType = arg.getType();
                if (argType != null && !componentType.isAssignableFrom(argType)) {
                  enabled = false;
                  break;
                }
              }
            }
          }
        }
        else {
          enabled = false;
        }
      }
      else {
        // zero-parameter overload is only viable while the caret is at the first position
        enabled = index == 0;
      }
    }
    else {
      enabled = isAssignableParametersBeforeGivenIndex(parms, args, index, substitutor);
    }
    context.setUIComponentEnabled(i, enabled);
    if (candidates.length > 1 && enabled) {
      if (PsiManager.getInstance(context.getProject()).areElementsEquivalent(chosenMethod, method)) {
        chosenInfo = candidate;
      }
      if (parms.length == args.length && realResolve == method &&
          isAssignableParametersBeforeGivenIndex(parms, args, args.length, substitutor)) {
        completeMatch = candidate;
      }
    }
  }
  // prefer the completion-chosen overload in virtual-comma mode or for an empty list,
  // otherwise prefer an exact resolve match
  context.setHighlightedParameter((Registry.is("editor.completion.hints.virtual.comma") || args.length == 0) && chosenInfo != null
                                  ? chosenInfo : ObjectUtils.coalesce(completeMatch, chosenInfo));
  Object highlightedCandidate = candidates.length == 1 ? candidates[0] : context.getHighlightedParameter();
  if (highlightedCandidate != null) {
    PsiMethod method = (PsiMethod)(highlightedCandidate instanceof CandidateInfo ?
                                   ((CandidateInfo)highlightedCandidate).getElement() : highlightedCandidate);
    // drop the current-parameter marker when the caret index cannot exist for the highlighted overload
    if (!method.isVarArgs() && index > 0 && index >= method.getParameterList().getParametersCount()) context.setCurrentParameter(-1);
  }
}

/**
 * Highlights the parameter-hint inlays surrounding the argument at {@code currentHintIndex}
 * and records them in {@code context} so they can be un-highlighted later.
 */
private static void highlightHints(@NotNull Editor editor, @Nullable PsiExpressionList expressionList, int currentHintIndex,
                                   @NotNull UserDataHolder context) {
  if (editor.isDisposed() || editor instanceof EditorWindow) return;
  ParameterHintsPresentationManager presentationManager = ParameterHintsPresentationManager.getInstance();
  Inlay currentHint = null;
  List<Inlay> highlightedHints = null;
  if (expressionList != null && expressionList.isValid()) {
    int expressionCount = expressionList.getExpressionCount();
    if (currentHintIndex == 0 || currentHintIndex > 0 && currentHintIndex < expressionCount) {
      highlightedHints = new ArrayList<>(expressionCount);
      ParameterHintsPass.syncUpdate(expressionList.getParent(), editor);
      PsiElement prevDelimiter, nextDelimiter;
      // iterate at least once when currentHintIndex == 0 so an empty argument list is covered
      for (int i = 0; i < Math.max(expressionCount, currentHintIndex == 0 ?
1 : 0); i++) {
        if (i < expressionCount) {
          PsiExpression expression = expressionList.getExpressions()[i];
          // walk outward from the expression to the surrounding delimiter tokens (comma/parenthesis)
          //noinspection StatementWithEmptyBody
          for (prevDelimiter = expression;
               prevDelimiter != null && !(prevDelimiter instanceof PsiJavaToken);
               prevDelimiter = prevDelimiter.getPrevSibling())
            ;
          //noinspection StatementWithEmptyBody
          for (nextDelimiter = expression;
               nextDelimiter != null && !(nextDelimiter instanceof PsiJavaToken);
               nextDelimiter = nextDelimiter.getNextSibling())
            ;
        }
        else {
          prevDelimiter = expressionList.getFirstChild(); // left parenthesis
          nextDelimiter = expressionList.getLastChild(); // right parenthesis
        }
        if (prevDelimiter != null && nextDelimiter != null) {
          CharSequence text = editor.getDocument().getImmutableCharSequence();
          int firstRangeStartOffset = prevDelimiter.getTextRange().getEndOffset();
          int firstRangeEndOffset = CharArrayUtil.shiftForward(text, firstRangeStartOffset, WHITESPACE_OR_LINE_BREAKS);
          // hints rendered right after the preceding delimiter
          for (Inlay inlay : editor.getInlayModel().getInlineElementsInRange(firstRangeStartOffset, firstRangeEndOffset)) {
            if (presentationManager.isParameterHint(inlay)) {
              highlightedHints.add(inlay);
              if (i == currentHintIndex && currentHint == null) currentHint = inlay;
            }
          }
          int secondRangeEndOffset = nextDelimiter.getTextRange().getStartOffset();
          if (secondRangeEndOffset > firstRangeEndOffset) {
            int secondRangeStartOffset = CharArrayUtil.shiftBackward(text, secondRangeEndOffset - 1, WHITESPACE_OR_LINE_BREAKS) + 1;
            // hints rendered right before the following delimiter
            for (Inlay inlay : editor.getInlayModel().getInlineElementsInRange(secondRangeStartOffset, secondRangeEndOffset)) {
              if (presentationManager.isParameterHint(inlay)) {
                highlightedHints.add(inlay);
              }
            }
          }
        }
      }
    }
  }
  // nothing changed since the last pass — avoid redundant repaints
  if (currentHint == context.getUserData(CURRENT_HINT) &&
      Objects.equals(highlightedHints, context.getUserData(HIGHLIGHTED_HINTS))) return;
  resetHints(context);
  if (currentHint != null) {
    presentationManager.setCurrent(currentHint, true);
    context.putUserData(CURRENT_HINT, currentHint);
  }
  if (!ContainerUtil.isEmpty(highlightedHints)) {
    for (Inlay highlightedHint : highlightedHints) {
      presentationManager.setHighlighted(highlightedHint, true);
    }
    context.putUserData(HIGHLIGHTED_HINTS, highlightedHints);
  }
}

/** Clears the "current"/"highlighted" hint decorations recorded in {@code context}. */
private static void resetHints(@NotNull UserDataHolder context) {
  ParameterHintsPresentationManager presentationManager = ParameterHintsPresentationManager.getInstance();
  Inlay currentHint = context.getUserData(CURRENT_HINT);
  if (currentHint != null) {
    presentationManager.setCurrent(currentHint, false);
    context.putUserData(CURRENT_HINT, null);
  }
  List<Inlay> highlightedHints = context.getUserData(HIGHLIGHTED_HINTS);
  if (highlightedHints != null) {
    for (Inlay hint : highlightedHints) {
      presentationManager.setHighlighted(hint, false);
    }
    context.putUserData(HIGHLIGHTED_HINTS, null);
  }
}

@Override
public void dispose(@NotNull DeleteParameterInfoContext context) {
  Editor editor = context.getEditor();
  if (!(editor instanceof EditorWindow)) {
    resetHints(context.getCustomContext());
    PsiElement parameterOwner = context.getParameterOwner();
    if (!editor.isDisposed() && parameterOwner != null && parameterOwner.isValid()) {
      ParameterHintsPass.syncUpdate(parameterOwner.getParent(), editor);
    }
  }
}

/**
 * Computes the substitutor for a candidate, running type inference only where it is possible,
 * and guarding against recursive overload resolution via {@code ourOverloadGuard}.
 */
private static PsiSubstitutor getCandidateInfoSubstitutor(PsiElement argList, CandidateInfo candidate, boolean resolveResult) {
  Computable<PsiSubstitutor> computeSubstitutor =
    () -> candidate instanceof MethodCandidateInfo && ((MethodCandidateInfo)candidate).isInferencePossible() ?
          ((MethodCandidateInfo)candidate).inferTypeArguments(CompletionParameterTypeInferencePolicy.INSTANCE, true) :
          candidate.getSubstitutor();
  if (resolveResult && candidate instanceof MethodCandidateInfo && ((MethodCandidateInfo)candidate).isInferencePossible()) {
    // the candidate is the actual resolve result — safe to infer directly, no recursion guard needed
    return computeSubstitutor.compute();
  }
  return MethodCandidateInfo.ourOverloadGuard.doPreventingRecursion(ObjectUtils.notNull(argList, candidate.getElement()), false, computeSubstitutor);
}

/**
 * Returns true when each of the first {@code length} arguments is assignable to the
 * corresponding (substituted) parameter type; varargs parameters match on component type.
 */
private static boolean isAssignableParametersBeforeGivenIndex(final PsiParameter[] parms,
                                                              final PsiExpression[] args,
                                                              int length,
                                                              PsiSubstitutor substitutor) {
  for (int j = 0; j < length; j++) {
    PsiParameter parm = parms[j];
    PsiExpression arg = args[j];
    assert parm.isValid();
    assert arg.isValid();
    PsiType parmType = parm.getType();
    PsiType argType = arg.getType();
    if (argType == null) continue;
    if (parmType instanceof PsiEllipsisType ) {
      parmType = ((PsiEllipsisType)parmType).getComponentType();
    }
    parmType = substitutor.substitute(parmType);
    if (!parmType.isAssignableFrom(argType)) {
      return false;
    }
  }
  return true;
}

@Override
@NotNull
public Class<PsiExpressionList> getArgumentListClass() { return PsiExpressionList.class; }

@Override
@NotNull
public IElementType getActualParametersRBraceType() { return JavaTokenType.RBRACE; }

@Override
@NotNull
public Set<Class> getArgumentListAllowedParentClasses() { return ourArgumentListAllowedParentClassesSet; }

@NotNull
@Override
public Set<Class> getArgListStopSearchClasses() { return ourStopSearch; }

@Override
@NotNull
public IElementType getActualParameterDelimiterType() { return JavaTokenType.COMMA; }

@Override
@NotNull
public PsiExpression[] getActualParameters(@NotNull PsiExpressionList psiExpressionList) { return psiExpressionList.getExpressions(); }

/** Maps an argument list to its owning call; null when the parent is not a call-like element. */
private static PsiCall getCall(PsiExpressionList list) {
  PsiElement listParent = list.getParent();
  if (listParent instanceof PsiMethodCallExpression) {
    return (PsiCall)listParent;
  }
  if (listParent instanceof PsiNewExpression) {
    return (PsiCall)listParent;
  }
  if (listParent instanceof PsiAnonymousClass) {
    // for an anonymous class the call is the enclosing "new" expression
    return (PsiCall)listParent.getParent();
  }
  if (listParent instanceof PsiEnumConstant) {
    return (PsiCall)listParent;
  }
  return null;
}

/**
 * Collects the candidate methods/constructors for the call owning {@code argList},
 * filtering out super-method duplicates and inaccessible candidates.
 */
private static CandidateInfo[] getMethods(PsiExpressionList argList) {
  final PsiCall call = getCall(argList);
  PsiResolveHelper helper = JavaPsiFacade.getInstance(argList.getProject()).getResolveHelper();
  if (call instanceof PsiCallExpression) {
    CandidateInfo[] candidates = getCandidates((PsiCallExpression)call);
    ArrayList<CandidateInfo> result = new ArrayList<>();
    if (!(argList.getParent() instanceof PsiAnonymousClass)) {
      cand:
      for (CandidateInfo candidate : candidates) {
        PsiMethod methodCandidate = (PsiMethod)candidate.getElement();
        // skip a candidate that is a super method of one already collected
        for (CandidateInfo info : result) {
          if (MethodSignatureUtil.isSuperMethod(methodCandidate, (PsiMethod)info.getElement())) {
            continue cand;
          }
        }
        if (candidate.isStaticsScopeCorrect()) {
          boolean accessible = candidate.isAccessible();
          if (!accessible && methodCandidate.getModifierList().hasModifierProperty(PsiModifier.PRIVATE)) {
            // privates are accessible within one file
            accessible = JavaPsiFacade.getInstance(methodCandidate.getProject()).getResolveHelper()
              .isAccessible(methodCandidate, methodCandidate.getModifierList(), call, null, null);
          }
          if (accessible) result.add(candidate);
        }
      }
    }
    else {
      PsiClass aClass = (PsiClass)argList.getParent();
      for (CandidateInfo candidate : candidates) {
        if (candidate.isStaticsScopeCorrect() && helper.isAccessible((PsiMethod)candidate.getElement(), argList, aClass)) {
          result.add(candidate);
        }
      }
    }
    // fall back to the unfiltered list when filtering removed everything
    return result.isEmpty() ?
candidates : result.toArray(CandidateInfo.EMPTY_ARRAY);
  }
  else {
    assert call instanceof PsiEnumConstant;
    //We are inside our own enum, no isAccessible check needed
    PsiMethod[] constructors = ((PsiEnumConstant)call).getContainingClass().getConstructors();
    CandidateInfo[] result = new CandidateInfo[constructors.length];
    for (int i = 0; i < constructors.length; i++) {
      result[i] = new CandidateInfo(constructors[i], PsiSubstitutor.EMPTY);
    }
    return result;
  }
}

/** Resolves all overload candidates for {@code call}; empty array when processor setup fails. */
private static CandidateInfo[] getCandidates(PsiCallExpression call) {
  final MethodCandidatesProcessor processor = new MethodResolverProcessor(call, call.getContainingFile(), new PsiConflictResolver[0]) {
    @Override
    protected boolean acceptVarargs() { return false; }
  };
  try {
    PsiScopesUtil.setupAndRunProcessor(processor, call, true);
  }
  catch (MethodProcessorSetupFailedException e) {
    return CandidateInfo.EMPTY_ARRAY;
  }
  final List<CandidateInfo> results = processor.getResults();
  return results.toArray(CandidateInfo.EMPTY_ARRAY);
}

/**
 * Renders the textual presentation of {@code method} (signature or single-parameter view)
 * into {@code context}, computing the highlight range of the current parameter.
 * Returns the rendered text, or null when the method/substitutor is invalid.
 */
public static String updateMethodPresentation(@NotNull PsiMethod method, @Nullable PsiSubstitutor substitutor, @NotNull ParameterInfoUIContext context) {
  CodeInsightSettings settings = CodeInsightSettings.getInstance();
  if (!method.isValid() || substitutor != null && !substitutor.isValid()) {
    context.setUIComponentEnabled(false);
    return null;
  }
  StringBuilder buffer = new StringBuilder();
  if (settings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO && !context.isSingleParameterInfo()) {
    if (!method.isConstructor()) {
      PsiType returnType = method.getReturnType();
      if (substitutor != null) {
        returnType = substitutor.substitute(returnType);
      }
      assert returnType != null : method;
      appendModifierList(buffer, method);
      buffer.append(returnType.getPresentableText(true));
      buffer.append(" ");
    }
    buffer.append(method.getName());
    buffer.append("(");
  }
  int currentParameter = context.getCurrentParameterIndex();
  PsiParameter[] parms = method.getParameterList().getParameters();
  int numParams = parms.length;
  int highlightStartOffset = -1;
  int highlightEndOffset = -1;
  if (numParams > 0) {
    // clamp to the varargs parameter in single-parameter mode
    if (context.isSingleParameterInfo() && method.isVarArgs() && currentParameter >= numParams) currentParameter = numParams - 1;
    for (int j = 0; j < numParams; j++) {
      if (context.isSingleParameterInfo() && j != currentParameter) continue;
      PsiParameter param = parms[j];
      int startOffset = buffer.length();
      if (param.isValid()) {
        PsiType paramType = param.getType();
        assert paramType.isValid();
        if (substitutor != null) {
          assert substitutor.isValid();
          paramType = substitutor.substitute(paramType);
        }
        if (context.isSingleParameterInfo()) buffer.append("<b>");
        appendModifierList(buffer, param);
        String type = paramType.getPresentableText(true);
        buffer.append(context.isSingleParameterInfo() ? StringUtil.escapeXml(type) : type);
        String name = param.getName();
        if (name != null && !context.isSingleParameterInfo()) {
          buffer.append(" ");
          buffer.append(name);
        }
        if (context.isSingleParameterInfo()) buffer.append("</b>");
      }
      if (context.isSingleParameterInfo()) {
        // attach the parameter's javadoc, inline for short text, in a table cell otherwise
        String javaDoc = new JavaDocInfoGenerator(param.getProject(), param).generateMethodParameterJavaDoc();
        if (javaDoc != null) {
          javaDoc = removeHyperlinks(javaDoc);
          if (javaDoc.length() < 100) {
            buffer.append("&nbsp;&nbsp;<i>").append(javaDoc).append("</i>");
          }
          else {
            buffer.insert(0, "<table><tr><td valign='top'>")
              .append("</td><td style='width:400px'>&nbsp;&nbsp;<i>").append(javaDoc).append("</i></td></tr></table>");
          }
        }
      }
      else {
        int endOffset = buffer.length();
        if (j < numParams - 1) {
          buffer.append(", ");
        }
        if (context.isUIComponentEnabled() &&
            (j == currentParameter || j == numParams - 1 && param.isVarArgs() && currentParameter >= numParams)) {
          highlightStartOffset = startOffset;
          highlightEndOffset = endOffset;
        }
      }
    }
  }
  else {
    buffer.append(CodeInsightBundle.message("parameter.info.no.parameters"));
  }
  if (settings.SHOW_FULL_SIGNATURES_IN_PARAMETER_INFO && !context.isSingleParameterInfo()) {
    buffer.append(")");
  }
  String text = buffer.toString();
  if (context.isSingleParameterInfo()) {
    context.setupRawUIComponentPresentation(text);
    return text;
  }
  else {
    return context.setupUIComponentPresentation(
      text,
      highlightStartOffset,
      highlightEndOffset,
      !context.isUIComponentEnabled(),
      method.isDeprecated() && !context.isSingleParameterInfo() && !context.isSingleOverload(),
      false,
      context.getDefaultParameterColor()
    );
  }
}

/** Strips anchor tags from generated javadoc HTML. */
private static String removeHyperlinks(String html) {
  return html.replaceAll("<a.*?>", "").replaceAll("</a>", "");
}

/**
 * Appends the presentable annotations of {@code owner} to {@code buffer}
 * (documented, non-TYPE_USE annotations only; repeatable annotations may repeat).
 */
private static void appendModifierList(@NotNull StringBuilder buffer, @NotNull PsiModifierListOwner owner) {
  int lastSize = buffer.length();
  Set<String> shownAnnotations = ContainerUtil.newHashSet();
  for (PsiAnnotation annotation : AnnotationUtil.getAllAnnotations(owner, false, null, !DumbService.isDumb(owner.getProject()))) {
    final PsiJavaCodeReferenceElement element = annotation.getNameReferenceElement();
    if (element != null) {
      final PsiElement resolved = element.resolve();
      if (resolved instanceof PsiClass &&
          (!JavaDocInfoGenerator.isDocumentedAnnotationType((PsiClass)resolved) ||
           AnnotationTargetUtil.findAnnotationTarget((PsiClass)resolved, PsiAnnotation.TargetType.TYPE_USE) != null)) {
        continue;
      }
      String referenceName = element.getReferenceName();
      if (shownAnnotations.add(referenceName) || JavaDocInfoGenerator.isRepeatableAnnotationType(resolved)) {
        if (lastSize != buffer.length()) buffer.append(' ');
        buffer.append('@').append(referenceName);
      }
    }
  }
  if (lastSize != buffer.length()) buffer.append(' ');
}

@Override
public void updateUI(final Object p, @NotNull final ParameterInfoUIContext context) {
  if (p instanceof CandidateInfo) {
    CandidateInfo info = (CandidateInfo)p;
    PsiMethod method = (PsiMethod)info.getElement();
    if (!method.isValid() || info instanceof MethodCandidateInfo && !((MethodCandidateInfo)info).getSiteSubstitutor().isValid()) {
      context.setUIComponentEnabled(false);
      return;
    }
    PsiElement parameterOwner = context.getParameterOwner();
    PsiCall call = parameterOwner instanceof PsiExpressionList ? getCall((PsiExpressionList)parameterOwner) : null;
    updateMethodPresentation(method, getCandidateInfoSubstitutor(parameterOwner, info, call != null && call.resolveMethod() == method), context);
  }
  else {
    updateMethodPresentation((PsiMethod)p, null, context);
  }
}

@Override
public boolean supportsOverloadSwitching() { return CodeInsightSettings.getInstance().SHOW_PARAMETER_NAME_HINTS_ON_COMPLETION; }

/**
 * In "virtual comma" mode, materializes the virtual commas the caret has passed over
 * into real text, then refreshes hints and highlighting.
 */
@Override
public void syncUpdateOnCaretMove(@NotNull UpdateParameterInfoContext context) {
  if (!Registry.is("editor.completion.hints.virtual.comma")) return;
  Editor editor = context.getEditor();
  Caret caret = editor.getCaretModel().getCurrentCaret();
  int caretOffset = caret.getOffset();
  List<Inlay> inlays = editor.getInlayModel().getInlineElementsInRange(caretOffset, caretOffset);
  if (inlays.isEmpty()) return;
  VisualPosition caretPosition = caret.getVisualPosition();
  ParameterHintsPresentationManager pm = ParameterHintsPresentationManager.getInstance();
  // count comma hints visually before the caret at this offset
  int inlaysBeforeCaretWithComma = ContainerUtil.count(inlays, inlay -> pm.isParameterHint(inlay) &&
                                                                        StringUtil.startsWithChar(pm.getHintText(inlay), ',') &&
                                                                        caretPosition.after(inlay.getVisualPosition()));
  if (inlaysBeforeCaretWithComma == 0) return;
  Project project = context.getProject();
  String textToInsert = StringUtil.repeat(", ", inlaysBeforeCaretWithComma);
  WriteCommandAction.runWriteCommandAction(project, () -> {
    editor.getDocument().insertString(caretOffset, textToInsert);
    caret.moveToOffset(caretOffset + textToInsert.length());
  });
  PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());
  PsiElement exprList = context.getParameterOwner();
  if (!(exprList instanceof PsiExpressionList) || !exprList.isValid()) return;
  PsiElement call = exprList.getParent();
  if (call == null || !call.isValid()) return;
  ParameterHintsPass.syncUpdate(call, editor);
  int index = ParameterInfoUtils.getCurrentParameterIndex(exprList.getNode(), editor.getCaretModel().getOffset(), JavaTokenType.COMMA);
  highlightHints(editor, (PsiExpressionList)exprList, index, context.getCustomContext());
}
}
package com.devmarvel.creditcardentry.library; import android.content.Context; import android.content.res.TypedArray; import android.graphics.drawable.Drawable; import android.os.Build; import android.os.Parcel; import android.os.Parcelable; import android.support.annotation.NonNull; import android.support.v4.os.ParcelableCompat; import android.support.v4.os.ParcelableCompatCreatorCallbacks; import android.util.AttributeSet; import android.util.SparseArray; import android.view.Gravity; import android.view.View; import android.widget.FrameLayout; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.RelativeLayout; import android.widget.TextView; import com.devmarvel.creditcardentry.R; import com.devmarvel.creditcardentry.internal.CreditCardEntry; public class CreditCardForm extends RelativeLayout { private CreditCardEntry entry; private boolean includeExp = true; private boolean includeSecurity = true; private boolean includeZip = true; private boolean includeHelper; private int textHelperColor; private Drawable inputBackground; private String cardNumberHint = "1234 5678 9012 3456"; public CreditCardForm(Context context) { this(context, null); } public CreditCardForm(Context context, AttributeSet attrs) { this(context, attrs, 0); } public CreditCardForm(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); if(!isInEditMode()) { // If the attributes are available, use them to color the icon if (attrs != null) { TypedArray typedArray = null; try { typedArray = context.getTheme().obtainStyledAttributes( attrs, R.styleable.CreditCardForm, 0, 0 ); this.cardNumberHint = typedArray.getString(R.styleable.CreditCardForm_card_number_hint); this.includeExp = typedArray.getBoolean(R.styleable.CreditCardForm_include_exp, true); this.includeSecurity = typedArray.getBoolean(R.styleable.CreditCardForm_include_security, true); this.includeZip = typedArray.getBoolean(R.styleable.CreditCardForm_include_zip, true); 
this.includeHelper = typedArray.getBoolean(R.styleable.CreditCardForm_include_helper, true); this.textHelperColor = typedArray.getColor(R.styleable.CreditCardForm_helper_text_color, getResources().getColor(R.color.text_helper_color)); this.inputBackground = typedArray.getDrawable(R.styleable.CreditCardForm_input_background); } finally { if (typedArray != null) typedArray.recycle(); } } // defaults if not set by user if(cardNumberHint == null) cardNumberHint = "1234 5678 9012 3456"; if(inputBackground == null) { //noinspection deprecation inputBackground = context.getResources().getDrawable(R.drawable.background_white); } } init(context, attrs, defStyle); } private void init(Context context, AttributeSet attrs, int style) { // the wrapper layout LinearLayout layout; if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { layout = new LinearLayout(context); } else { layout = new LinearLayout(context); } layout.setId(R.id.cc_form_layout); LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.WRAP_CONTENT); params.addRule(RelativeLayout.ALIGN_PARENT_LEFT); params.addRule(RelativeLayout.ALIGN_PARENT_TOP); params.addRule(LinearLayout.HORIZONTAL); params.setMargins(0, 0, 0, 0); layout.setLayoutParams(params); layout.setPadding(0, 0, 0, 0); //noinspection deprecation layout.setBackgroundDrawable(inputBackground); // set up the card image container and images FrameLayout cardImageFrame = new FrameLayout(context); LinearLayout.LayoutParams frameParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); frameParams.gravity = Gravity.CENTER_VERTICAL; cardImageFrame.setLayoutParams(frameParams); cardImageFrame.setFocusable(true); cardImageFrame.setFocusableInTouchMode(true); cardImageFrame.setPadding(10, 0, 0, 0); ImageView cardFrontImage = new ImageView(context); LayoutParams layoutParams = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); cardFrontImage.setLayoutParams(layoutParams); 
cardFrontImage.setImageResource(CardType.INVALID.frontResource); cardImageFrame.addView(cardFrontImage); ImageView cardBackImage = new ImageView(context); layoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); cardBackImage.setLayoutParams(layoutParams); cardBackImage.setImageResource(CardType.INVALID.backResource); cardBackImage.setVisibility(View.GONE); cardImageFrame.addView(cardBackImage); layout.addView(cardImageFrame); // add the data entry form LinearLayout.LayoutParams entryParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); entryParams.gravity = Gravity.CENTER_VERTICAL; entry = new CreditCardEntry(context, includeExp, includeSecurity, includeZip, attrs, style); entry.setId(R.id.cc_entry); // this obnoxious 6 for bottom padding is to make the damn text centered on the image... if you know a better way... PLEASE HELP entry.setPadding(0, 0, 0, 6); entry.setLayoutParams(entryParams); // set any passed in attrs entry.setCardImageView(cardFrontImage); entry.setBackCardImage(cardBackImage); entry.setCardNumberHint(cardNumberHint); this.addView(layout); // set up optional helper text view if (includeHelper) { TextView textHelp = new TextView(context); textHelp.setId(R.id.text_helper); textHelp.setText(getResources().getString(R.string.CreditCardNumberHelp)); textHelp.setTextColor(this.textHelperColor); layoutParams = new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); layoutParams.addRule(RelativeLayout.BELOW, layout.getId()); layoutParams.addRule(RelativeLayout.CENTER_HORIZONTAL); layoutParams.setMargins(0, 15, 0, 20); textHelp.setLayoutParams(layoutParams); entry.setTextHelper(textHelp); this.addView(textHelp); } layout.addView(entry); } public void setOnCardValidCallback(CardValidCallback callback) { entry.setOnCardValidCallback(callback); } /** * all internal components will be attached this same focus listener */ @Override public void 
setOnFocusChangeListener(OnFocusChangeListener l) { entry.setOnFocusChangeListener(l); } @Override public OnFocusChangeListener getOnFocusChangeListener() { return entry.getOnFocusChangeListener(); } @SuppressWarnings("unused") public boolean isCreditCardValid() { return entry.isCreditCardValid(); } @SuppressWarnings("unused") public CreditCard getCreditCard() { return entry.getCreditCard(); } /** * request focus for the credit card field */ @SuppressWarnings("unused") public void focusCreditCard() { entry.focusCreditCard(); } /** * request focus for the expiration field */ @SuppressWarnings("unused") public void focusExp() { entry.focusExp(); } /** * request focus for the security code field */ @SuppressWarnings("unused") public void focusSecurityCode() { entry.focusSecurityCode(); } /** * request focus for the zip field (IF it's enabled) */ @SuppressWarnings("unused") public void focusZip() { entry.focusZip(); } /** * clear and reset the entire form */ @SuppressWarnings("unused") public void clearForm() { entry.clearAll(); } /** * @param cardNumber the card number to show * @param focusNextField true to go to next field (only works if the number is valid) */ public void setCardNumber(String cardNumber, boolean focusNextField) { entry.setCardNumber(cardNumber, focusNextField); } /** * @param expirationDate the exp to show * @param focusNextField true to go to next field (only works if the number is valid) */ @SuppressWarnings("unused") public void setExpDate(String expirationDate, boolean focusNextField) { entry.setExpDate(expirationDate, focusNextField); } /** * @param securityCode the security code to show * @param focusNextField true to go to next field (only works if the number is valid) */ @SuppressWarnings("unused") public void setSecurityCode(String securityCode, boolean focusNextField) { entry.setSecurityCode(securityCode, focusNextField); } /** * @param zip the zip to show * @param focusNextField true to go to next field (only works if the number is 
valid) */ @SuppressWarnings("unused") public void setZipCode(String zip, boolean focusNextField) { entry.setZipCode(zip, focusNextField); } @Override protected void dispatchSaveInstanceState(@NonNull SparseArray<Parcelable> container) { dispatchFreezeSelfOnly(container); } @Override protected void dispatchRestoreInstanceState(@NonNull SparseArray<Parcelable> container) { dispatchThawSelfOnly(container); } @Override public void onRestoreInstanceState(Parcelable state) { SavedState ss = (SavedState) state; super.onRestoreInstanceState(ss.getSuperState()); for (int i = 0; i < getChildCount(); i++) { getChildAt(i).restoreHierarchyState(ss.childrenStates); } } @Override protected Parcelable onSaveInstanceState() { Parcelable superState = super.onSaveInstanceState(); SavedState ss = new SavedState(superState); ss.childrenStates = new SparseArray(); for (int i = 0; i < getChildCount(); i++) { getChildAt(i).saveHierarchyState(ss.childrenStates); } return ss; } static class SavedState extends BaseSavedState { SparseArray childrenStates; SavedState(Parcelable superState) { super(superState); } private SavedState(Parcel in, ClassLoader classLoader) { super(in); childrenStates = in.readSparseArray(classLoader); } @Override public void writeToParcel(Parcel out, int flags) { super.writeToParcel(out, flags); out.writeSparseArray(childrenStates); } public static final Creator<SavedState> CREATOR = ParcelableCompat.newCreator(new ParcelableCompatCreatorCallbacks<SavedState>() { @Override public SavedState createFromParcel(Parcel in, ClassLoader loader) { return new SavedState(in, loader); } @Override public SavedState[] newArray(int size) { return new SavedState[size]; } }); } }
package com.ril.dao; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Repository; import java.sql.Connection; import com.ril.entity.User; @Repository public class UserDao { @Autowired private Database database; private final String selectSql="select iduser,login,encryptedkey,validationkey,validaccount,hashed_password,role,timestamp_modif_pwd,encryptedkeypwd from user "; public User findByLoginAndPassword(String login,String password,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql = selectSql+"where login=? and password=? and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setString(1,login); ps.setString(2,password); ps.setBoolean(3,actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public User findByIduser(long iduser,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql= selectSql+"where iduser=? 
and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setLong(1, iduser); ps.setBoolean(2, actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public long Save(User u) throws Exception { long result=0; try { Connection conn = database.getSqlConnection(); String sql = "INSERT INTO USER (iduser,login,encryptedkeypwd,role,encryptedkey,validationkey,validaccount,actif,hashed_password,timestamp_modif_pwd) VALUES (?,?,?,?,?,?,?,?,?,?) "+ "ON DUPLICATE KEY UPDATE login=VALUES(login), encryptedkeypwd=VALUES(encryptedkeypwd), role=VALUES(role),encryptedkey=VALUES(encryptedkey), "+ "validationkey=VALUES(validationkey), validaccount=VALUES(validaccount), actif=VALUES(actif),hashed_password=VALUES(hashed_password), timestamp_modif_pwd=VALUES(timestamp_modif_pwd)"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql,Statement.RETURN_GENERATED_KEYS); ps.setLong(1,u.getIduser()); ps.setString(2,u.getLogin()); ps.setBytes(3,u.getEncryptedkeypwd()); ps.setString(4,u.getRole()); ps.setBytes(5,u.getEncryptedkey()); ps.setString(6,u.getValidationkey()); ps.setBoolean(7,u.getValidaccount()); ps.setBoolean(8,u.isActif()); ps.setBytes(9,u.getHashedPassword()); ps.setTimestamp(10,u.getTimestampModifPwd()); ps.executeUpdate(); ResultSet rspk=ps.getGeneratedKeys(); rspk.next(); result=rspk.getLong(1); rspk.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } if (result==0) throw new Exception(String.format("Erreur dans UserDa.Save: user=%s",u.toString())); return result; } public User findByLogin(String login,boolean actif) { User u=null; try { Connection conn = 
database.getSqlConnection(); String sql= selectSql+"where login=? and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setString(1, login); ps.setBoolean(2, actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public User findByIduserAndValidationkey(Long iduser, String validationkey,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql= selectSql+"where iduser=? and validationkey=? and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setLong(1, iduser); ps.setString(2, validationkey); ps.setBoolean(3,actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public User findByLoginAndHashedPassword(String login,byte[] hashedPassword,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql=selectSql+"where login=? and hashed_Password=? 
and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setString(1, login); ps.setBytes(2, hashedPassword); ps.setBoolean(3, actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public User findByIduserAndEncryptedkey(Long iduser, byte[] key,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql= selectSql+"where iduser=? and encryptedkey=? and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setLong(1, iduser); ps.setBytes(2, key); ps.setBoolean(3, actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } public int ActiverDesactiverByIduser(long iduser, boolean actif) throws Exception { int result = 0; try { Connection conn = database.getSqlConnection(); String sql = "update user set actif=? 
where iduser=?"; PreparedStatement ps = conn.prepareStatement(sql); ps.setBoolean(1, actif); ps.setLong(2, iduser); result = ps.executeUpdate(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } if (result == 0) throw new Exception( String.format("Erreur dans user.ActiverDesactiverByIduser: iduser=%d, actif=%s",iduser, actif)); return result; } public int ActiverDesactiverUser(User u, boolean actif) throws Exception { return ActiverDesactiverByIduser(u.getIduser(),actif); } public User findByIduserAndEncryptedkeypwd(Long iduser, byte[] key,boolean actif) { User u=null; try { Connection conn = database.getSqlConnection(); String sql=selectSql+"where iduser=? and encryptedkey=? and actif=?"; System.out.println(sql); PreparedStatement ps = conn.prepareStatement(sql); ps.setLong(1, iduser); ps.setBytes(2, key); ps.setBoolean(3, actif); ResultSet rs = ps.executeQuery(); while (rs.next()) { u = new User(rs.getLong(1),rs.getString(2),rs.getBytes(3),rs.getString(4),rs.getBoolean(5),false, rs.getBytes(6),"",rs.getString(7),true,rs.getTimestamp(8),rs.getBytes(9) ); } rs.close(); ps.close(); } catch (SQLException e) { // TODO Auto-generated catch block e.printStackTrace(); } return u; } }
package org.apache.solr.cloud;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;

import javax.management.JMException;

import com.google.common.collect.Ordering;
import com.google.common.util.concurrent.AtomicLongMap;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.data.Stat;
import org.apache.zookeeper.jmx.ManagedUtil;
import org.apache.zookeeper.server.NIOServerCnxn;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.ServerCnxn;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ServerConfig;
import org.apache.zookeeper.server.SessionTracker.Session;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig.ConfigException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * In-process ZooKeeper server for tests. Runs a single standalone ZooKeeper
 * instance on a daemon thread and additionally tracks how many watches are
 * registered in parallel per node, so tests can detect (or fail on) watch
 * leaks via {@link LimitViolationAction}.
 */
public class ZkTestServer {
  public static final int TICK_TIME = 1000;

  private static final Logger log = LoggerFactory.getLogger(ZkTestServer.class);

  protected final ZKServerMain zkServer = new ZKServerMain();

  private String zkDir;

  private int clientPort;

  private volatile Thread zooThread;

  private int theTickTime = TICK_TIME;

  /** What to do when a watch-count limit is exceeded. */
  public enum LimitViolationAction {
    IGNORE,
    REPORT,
    FAIL,
  }

  /**
   * Wraps the actual ZooKeeper server bootstrap/shutdown and installs the
   * watch-limiting connection factory and database.
   */
  class ZKServerMain {

    private ServerCnxnFactory cnxnFactory;
    private ZooKeeperServer zooKeeperServer;
    private LimitViolationAction violationReportAction = LimitViolationAction.REPORT;
    private WatchLimiter limiter = new WatchLimiter(1, LimitViolationAction.IGNORE);

    protected void initializeAndRun(String[] args) throws ConfigException,
        IOException {
      try {
        ManagedUtil.registerLog4jMBeans();
      } catch (JMException e) {
        log.warn("Unable to register log4j JMX control", e);
      }

      ServerConfig config = new ServerConfig();
      if (args.length == 1) {
        config.parse(args[0]);
      } else {
        config.parse(args);
      }

      runFromConfig(config);
    }

    /** Tracks the number of concurrent watches per node path for one watch type. */
    private class WatchLimit {
      private long limit;
      private final String desc;

      private LimitViolationAction action;
      // current number of live watches per path
      private AtomicLongMap<String> counters = AtomicLongMap.create();
      // highest concurrent count ever seen per path (for reporting)
      private ConcurrentHashMap<String,Long> maxCounters = new ConcurrentHashMap<>();

      WatchLimit(long limit, String desc, LimitViolationAction action) {
        this.limit = limit;
        this.desc = desc;
        this.action = action;
      }

      public void setAction(LimitViolationAction action) {
        this.action = action;
      }

      public void setLimit(long limit) {
        this.limit = limit;
      }

      /** Called when a watch is added; records it and enforces the limit. */
      public void updateForWatch(String key, Watcher watcher) {
        if (watcher != null) {
          log.debug("Watch added: {}: {}", desc, key);
          long count = counters.incrementAndGet(key);
          Long lastCount = maxCounters.get(key);
          if (lastCount == null || count > lastCount) {
            maxCounters.put(key, count);
          }
          if (count > limit && action != LimitViolationAction.IGNORE) {
            String msg = "Number of watches created in parallel for data: " + key +
                ", type: " + desc + " exceeds limit (" + count + " > " + limit + ")";
            log.warn("{}", msg);
            if (action == LimitViolationAction.FAIL) throw new AssertionError(msg);
          }
        }
      }

      /** Called when a watch fires (watches are one-shot, so the count drops). */
      public void updateForFire(WatchedEvent event) {
        log.debug("Watch fired: {}: {}", desc, event.getPath());
        counters.decrementAndGet(event.getPath());
      }

      private String reportLimitViolations() {
        Object[] maxKeys = maxCounters.keySet().toArray();
        // sort paths by their maximum concurrent watch count, descending
        Arrays.sort(maxKeys, new Comparator<Object>() {
          private final Comparator<Long> valComp = Ordering.natural().reverse();
          @Override
          public int compare(Object o1, Object o2) {
            return valComp.compare(maxCounters.get(o1), maxCounters.get(o2));
          }
        });

        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (Object key : maxKeys) {
          long value = maxCounters.get(key);
          if (value <= limit) continue;
          if (first) {
            sb.append("\nMaximum concurrent ").append(desc).append(" watches above limit:\n\n");
            first = false;
          }
          sb.append("\t").append(maxCounters.get(key)).append('\t').append(key).append('\n');
        }
        return sb.toString();
      }
    }

    /** Groups the three per-type watch limits (stat/data/children). */
    public class WatchLimiter {
      WatchLimit statLimit;
      WatchLimit dataLimit;
      WatchLimit childrenLimit;

      private WatchLimiter (long limit, LimitViolationAction action) {
        statLimit = new WatchLimit(limit, "create/delete", action);
        dataLimit = new WatchLimit(limit, "data", action);
        childrenLimit = new WatchLimit(limit, "children", action);
      }

      public void setAction(LimitViolationAction action) {
        statLimit.setAction(action);
        dataLimit.setAction(action);
        childrenLimit.setAction(action);
      }

      public void setLimit(long limit) {
        statLimit.setLimit(limit);
        dataLimit.setLimit(limit);
        childrenLimit.setLimit(limit);
      }

      public String reportLimitViolations() {
        return statLimit.reportLimitViolations() +
            dataLimit.reportLimitViolations() +
            childrenLimit.reportLimitViolations();
      }

      /** Routes a fired event to the limit matching its type. */
      private void updateForFire(WatchedEvent event) {
        switch (event.getType()) {
          case None:
            break;
          case NodeCreated:
          case NodeDeleted:
            statLimit.updateForFire(event);
            break;
          case NodeDataChanged:
            dataLimit.updateForFire(event);
            break;
          case NodeChildrenChanged:
            childrenLimit.updateForFire(event);
            break;
        }
      }
    }

    /** Connection that notifies the limiter whenever a watch fires. */
    private class TestServerCnxn extends NIOServerCnxn {

      private final WatchLimiter limiter;

      public TestServerCnxn(ZooKeeperServer zk, SocketChannel sock,
                            SelectionKey sk, NIOServerCnxnFactory factory,
                            WatchLimiter limiter) throws IOException {
        super(zk, sock, sk, factory);
        this.limiter = limiter;
      }

      @Override
      public synchronized void process(WatchedEvent event) {
        limiter.updateForFire(event);
        super.process(event);
      }
    }

    /** Factory producing {@link TestServerCnxn} instead of plain connections. */
    private class TestServerCnxnFactory extends NIOServerCnxnFactory {

      private final WatchLimiter limiter;

      public TestServerCnxnFactory(WatchLimiter limiter) throws IOException {
        super();
        this.limiter = limiter;
      }

      @Override
      protected NIOServerCnxn createConnection(SocketChannel sock, SelectionKey sk) throws IOException {
        return new TestServerCnxn(zkServer, sock, sk, this, limiter);
      }
    }

    /** ZKDatabase that counts watch registrations through the limiter. */
    private class TestZKDatabase extends ZKDatabase {

      private final WatchLimiter limiter;

      public TestZKDatabase(FileTxnSnapLog snapLog, WatchLimiter limiter) {
        super(snapLog);
        this.limiter = limiter;
      }

      @Override
      public Stat statNode(String path, ServerCnxn serverCnxn) throws KeeperException.NoNodeException {
        limiter.statLimit.updateForWatch(path, serverCnxn);
        return super.statNode(path, serverCnxn);
      }

      @Override
      public byte[] getData(String path, Stat stat, Watcher watcher) throws KeeperException.NoNodeException {
        limiter.dataLimit.updateForWatch(path, watcher);
        return super.getData(path, stat, watcher);
      }

      @Override
      public List<String> getChildren(String path, Stat stat, Watcher watcher) throws KeeperException.NoNodeException {
        limiter.childrenLimit.updateForWatch(path, watcher);
        return super.getChildren(path, stat, watcher);
      }
    }

    /**
     * Run from a ServerConfig. Blocks until the server is shut down.
     * @param config ServerConfig to use.
     * @throws IOException If there is a low-level I/O error.
     */
    public void runFromConfig(ServerConfig config) throws IOException {
      log.info("Starting server");
      try {
        // ZooKeeper maintains a static collection of AuthenticationProviders, so
        // we make sure the SASL provider is loaded so that it can be used in
        // subsequent tests.
        System.setProperty("zookeeper.authProvider.1",
            "org.apache.zookeeper.server.auth.SASLAuthenticationProvider");
        // Note that this thread isn't going to be doing anything else,
        // so rather than spawning another thread, we will just call
        // run() in this thread.
        // create a file logger url from the command line args
        FileTxnSnapLog ftxn = new FileTxnSnapLog(new File(
            config.getDataLogDir()), new File(config.getDataDir()));

        zooKeeperServer = new ZooKeeperServer(ftxn, config.getTickTime(),
            config.getMinSessionTimeout(), config.getMaxSessionTimeout(),
            null /* this is not used */, new TestZKDatabase(ftxn, limiter));
        cnxnFactory = new TestServerCnxnFactory(limiter);
        cnxnFactory.configure(config.getClientPortAddress(),
            config.getMaxClientCnxns());
        cnxnFactory.startup(zooKeeperServer);
        cnxnFactory.join();

        zkServer.shutdown();

        if (violationReportAction != LimitViolationAction.IGNORE) {
          String limitViolations = limiter.reportLimitViolations();
          if (!limitViolations.isEmpty()) {
            log.warn("Watch limit violations: {}", limitViolations);
            if (violationReportAction == LimitViolationAction.FAIL) {
              throw new AssertionError("Parallel watch limits violated");
            }
          }
        }
      } catch (InterruptedException e) {
        // warn, but generally this is ok
        log.warn("Server interrupted", e);
      }
    }

    /**
     * Shutdown the serving instance
     * @throws IOException If there is a low-level I/O error.
     */
    protected void shutdown() throws IOException {
      zooKeeperServer.shutdown();
      ZKDatabase zkDb = zooKeeperServer.getZKDatabase();
      if (cnxnFactory != null && cnxnFactory.getLocalPort() != 0) {
        // NOTE(review): getZkHost() already contains ":port", so this passes
        // "host:port:port"; parseHostPortList splits on the last ':' — confirm
        // this is the intended address before changing it.
        waitForServerDown(getZkHost() + ":" + getPort(), 5000);
      }
      if (cnxnFactory != null) {
        cnxnFactory.shutdown();
        try {
          cnxnFactory.join();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
      if (zkDb != null) {
        zkDb.close();
      }
    }

    public int getLocalPort() {
      if (cnxnFactory == null) {
        throw new IllegalStateException("A port has not yet been selected");
      }
      int port;
      try {
        port = cnxnFactory.getLocalPort();
      } catch (NullPointerException e) {
        // the factory may not have bound its socket yet; surface that as the
        // same "not selected" state as the other checks
        throw new IllegalStateException("A port has not yet been selected");
      }
      if (port == 0) {
        throw new IllegalStateException("A port has not yet been selected");
      }
      return port;
    }

    public void setViolationReportAction(LimitViolationAction violationReportAction) {
      this.violationReportAction = violationReportAction;
    }

    public WatchLimiter getLimiter() {
      return limiter;
    }
  }

  public ZkTestServer(String zkDir) {
    this.zkDir = zkDir;
  }

  public ZkTestServer(String zkDir, int port) {
    this.zkDir = zkDir;
    this.clientPort = port;
    // allow watch-limit behavior to be overridden from system properties
    String reportAction = System.getProperty("tests.zk.violationReportAction");
    if (reportAction != null) {
      log.info("Overriding violation report action to: {}", reportAction);
      setViolationReportAction(LimitViolationAction.valueOf(reportAction));
    }
    String limiterAction = System.getProperty("tests.zk.limiterAction");
    if (limiterAction != null) {
      log.info("Overriding limiter action to: {}", limiterAction);
      getLimiter().setAction(LimitViolationAction.valueOf(limiterAction));
    }
  }

  public String getZkHost() {
    return "127.0.0.1:" + zkServer.getLocalPort();
  }

  public String getZkAddress() {
    return "127.0.0.1:" + zkServer.getLocalPort() + "/solr";
  }

  public int getPort() {
    return zkServer.getLocalPort();
  }

  /** Expires the given ZooKeeper session on the server side. */
  public void expire(final long sessionId) {
    zkServer.zooKeeperServer.expire(new Session() {
      @Override
      public long getSessionId() {
        return sessionId;
      }
      @Override
      public int getTimeout() {
        return 4000;
      }
      @Override
      public boolean isClosing() {
        return false;
      }
    });
  }

  /**
   * Starts the server on a daemon thread and blocks until a client port has
   * been bound (polling every 100ms, failing after 500 attempts).
   */
  public void run() throws InterruptedException {
    log.info("STARTING ZK TEST SERVER");
    // we don't call super.setUp
    zooThread = new Thread() {
      @Override
      public void run() {
        ServerConfig config = new ServerConfig() {
          {
            setClientPort(ZkTestServer.this.clientPort);
            this.dataDir = zkDir;
            this.dataLogDir = zkDir;
            this.tickTime = theTickTime;
          }

          public void setClientPort(int clientPort) {
            if (clientPortAddress != null) {
              try {
                this.clientPortAddress = new InetSocketAddress(
                    InetAddress.getByName(clientPortAddress.getHostName()), clientPort);
              } catch (UnknownHostException e) {
                throw new RuntimeException(e);
              }
            } else {
              this.clientPortAddress = new InetSocketAddress(clientPort);
            }
            System.out.println("client port:" + this.clientPortAddress);
          }
        };

        try {
          zkServer.runFromConfig(config);
        } catch (Throwable e) {
          throw new RuntimeException(e);
        }
      }
    };

    zooThread.setDaemon(true);
    zooThread.start();

    int cnt = 0;
    int port = -1;
    try {
      port = getPort();
    } catch (IllegalStateException e) {
    }
    while (port < 1) {
      Thread.sleep(100);
      try {
        port = getPort();
      } catch (IllegalStateException e) {
      }
      if (cnt == 500) {
        throw new RuntimeException("Could not get the port for ZooKeeper server");
      }
      cnt++;
    }
    log.info("start zk server on port: {}", port);
  }

  @SuppressWarnings("deprecation")
  public void shutdown() throws IOException, InterruptedException {
    // TODO: this can log an exception while trying to unregister a JMX MBean
    zkServer.shutdown();
    try {
      zooThread.join();
    } catch (NullPointerException e) {
      // okay - the server thread was never started
    }
  }

  /** Polls the server with the "stat" 4-letter word until it stops answering. */
  public static boolean waitForServerDown(String hp, long timeout) {
    long start = System.currentTimeMillis();
    while (true) {
      try {
        HostPort hpobj = parseHostPortList(hp).get(0);
        send4LetterWord(hpobj.host, hpobj.port, "stat");
      } catch (IOException e) {
        return true;
      }

      if (System.currentTimeMillis() > start + timeout) {
        break;
      }
      try {
        Thread.sleep(250);
      } catch (InterruptedException e) {
        // ignore
      }
    }
    return false;
  }

  /** Simple host + port pair. */
  public static class HostPort {
    String host;
    int port;

    HostPort(String host, int port) {
      this.host = host;
      this.port = port;
    }
  }

  /**
   * Send the 4letterword
   * @param host the destination host
   * @param port the destination port
   * @param cmd the 4letterword
   * @return server response
   */
  public static String send4LetterWord(String host, int port, String cmd)
      throws IOException {
    log.info("connecting to {} {}", host, port);
    BufferedReader reader = null;
    try (Socket sock = new Socket(host, port)) {
      OutputStream outstream = sock.getOutputStream();
      outstream.write(cmd.getBytes(StandardCharsets.US_ASCII));
      outstream.flush();
      // this replicates NC - close the output stream before reading
      sock.shutdownOutput();

      reader = new BufferedReader(
          new InputStreamReader(sock.getInputStream(), StandardCharsets.US_ASCII));
      StringBuilder sb = new StringBuilder();
      String line;
      while ((line = reader.readLine()) != null) {
        sb.append(line).append("\n");
      }
      return sb.toString();
    } finally {
      if (reader != null) {
        reader.close();
      }
    }
  }

  /** Parses a comma-separated list of {@code host:port} entries. */
  public static List<HostPort> parseHostPortList(String hplist) {
    ArrayList<HostPort> alist = new ArrayList<>();
    for (String hp : hplist.split(",")) {
      int idx = hp.lastIndexOf(':');
      String host = hp.substring(0, idx);
      int port;
      try {
        port = Integer.parseInt(hp.substring(idx + 1));
      } catch (RuntimeException e) {
        // keep the original exception as the cause instead of flattening it
        // into the message and losing the stack trace
        throw new RuntimeException("Problem parsing " + hp, e);
      }
      alist.add(new HostPort(host, port));
    }
    return alist;
  }

  public int getTheTickTime() {
    return theTickTime;
  }

  public void setTheTickTime(int theTickTime) {
    this.theTickTime = theTickTime;
  }

  public String getZkDir() {
    return zkDir;
  }

  public void setViolationReportAction(LimitViolationAction violationReportAction) {
    zkServer.setViolationReportAction(violationReportAction);
  }

  public ZKServerMain.WatchLimiter getLimiter() {
    return zkServer.getLimiter();
  }
}
package org.apache.solr.update.processor;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.core.SolrCore;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.junit.Assume;
import org.junit.BeforeClass;

import javax.script.ScriptEngineManager;
import javax.script.ScriptEngine;

import java.util.ArrayList;
import java.util.List;

/**
 * Tests {@link StatelessScriptUpdateProcessorFactory}.
 *
 * TODO: This test, to run from an IDE, requires a working directory of &lt;path-to&gt;/solr/core/src/test-files. Fix!
 */
public class StatelessScriptUpdateProcessorFactoryTest extends UpdateProcessorTestBase {

  @BeforeClass
  public static void beforeClass() throws Exception {
    // skip the whole suite if this JVM has no JavaScript script engine
    Assume.assumeNotNull((new ScriptEngineManager()).getEngineByExtension("js"));
    initCore("solrconfig-script-updateprocessor.xml", "schema12.xml");
  }

  /**
   * simple test of a basic script processor chain using the full
   * RequestHandler + UpdateProcessorChain flow
   */
  public void testFullRequestHandlerFlow() throws Exception {
    assertU("Simple assertion that adding a document works",
        adoc("id", "4055",
             "subject", "Hoss"));
    assertU(commit());

    assertQ("couldn't find hoss using script added field",
        req("q", "script_added_i:[40 TO 45]",
            "fq", "id:4055")
        , "//result[@numFound=1]"
        , "//str[@name='id'][.='4055']"
        );

    // clean up
    processDeleteById("run-no-scripts", "4055");
    processCommit("run-no-scripts");
  }

  /** Verifies that a single-script chain runs its add/delete/commit hooks. */
  public void testSingleScript() throws Exception {
    SolrCore core = h.getCore();
    UpdateRequestProcessorChain chained = core.getUpdateProcessingChain("single-script");
    final StatelessScriptUpdateProcessorFactory factory =
        ((StatelessScriptUpdateProcessorFactory) chained.getFactories()[0]);
    // the script appends a message per invoked function; we assert on these below
    final List<String> functionMessages = new ArrayList<>();
    factory.setScriptEngineCustomizer(new ScriptEngineCustomizer() {
      @Override
      public void customize(ScriptEngine engine) {
        engine.put("testCase", StatelessScriptUpdateProcessorFactoryTest.this);
        engine.put("functionMessages", functionMessages);
      }
    });
    assertNotNull(chained);

    processAdd("single-script",
        doc(f("id", "1"),
            f("name", " foo "),
            f("subject", "bar")));
    processCommit("run-no-scripts");

    assertQ("couldn't find doc by id",
        req("q", "id:1")
        , "//result[@numFound=1]");

    processDeleteById("single-script", "1");
    processCommit("single-script");

    assertQ("found deleted doc",
        req("q", "id:1")
        , "//result[@numFound=0]");

    assertEquals(3, functionMessages.size());
    assertTrue(functionMessages.contains("processAdd0"));
    assertTrue(functionMessages.contains("processDelete0"));
    assertTrue(functionMessages.contains("processCommit0"));
  }

  /** Verifies that multi-script chains run every script, in configured order. */
  public void testMultipleScripts() throws Exception {
    SolrCore core = h.getCore();

    for (final String chain : new String[] {"dual-scripts-arr", "dual-scripts-strs"}) {

      UpdateRequestProcessorChain chained = core.getUpdateProcessingChain(chain);
      final StatelessScriptUpdateProcessorFactory factory =
          ((StatelessScriptUpdateProcessorFactory) chained.getFactories()[0]);
      final List<String> functionMessages = new ArrayList<>();
      ScriptEngineCustomizer customizer = new ScriptEngineCustomizer() {
        @Override
        public void customize(ScriptEngine engine) {
          engine.put("testCase", StatelessScriptUpdateProcessorFactoryTest.this);
          engine.put("functionMessages", functionMessages);
        }
      };
      factory.setScriptEngineCustomizer(customizer);
      assertNotNull(chained);

      SolrInputDocument d = processAdd(chain,
          doc(f("id", "2"),
              f("name", " foo "),
              f("subject", "bar")));

      assertEquals(chain + " didn't add Double field",
          42.3d, d.getFieldValue("script_added_d"));
      assertEquals(chain + " didn't add integer field",
          Integer.valueOf(42), d.getFieldValue("script_added_i"));

      processCommit("run-no-scripts");

      assertQ(chain + ": couldn't find doc by id",
          req("q", "id:2")
          , "//result[@numFound=1]");

      processDeleteById(chain, "2");
      processCommit(chain);

      assertEquals(chain, 6, functionMessages.size());
      assertTrue(chain, functionMessages.contains("processAdd0"));
      assertTrue(chain, functionMessages.contains("processAdd1"));
      assertTrue(chain + ": script order doesn't match conf order",
          functionMessages.indexOf("processAdd0")
          < functionMessages.indexOf("processAdd1"));

      assertTrue(chain, functionMessages.contains("processDelete0"));
      assertTrue(chain, functionMessages.contains("processDelete1"));
      assertTrue(chain + ": script order doesn't match conf order",
          functionMessages.indexOf("processDelete0")
          < functionMessages.indexOf("processDelete1"));

      assertTrue(chain, functionMessages.contains("processCommit0"));
      assertTrue(chain, functionMessages.contains("processCommit1"));
      assertTrue(chain + ": script order doesn't match conf order",
          functionMessages.indexOf("processCommit0")
          < functionMessages.indexOf("processCommit1"));

      finish(chain);

      assertEquals(chain, 8, functionMessages.size());
      assertTrue(chain, functionMessages.contains("finish0"));
      assertTrue(chain, functionMessages.contains("finish1"));
      assertTrue(chain + ": script order doesn't match conf order",
          functionMessages.indexOf("finish0")
          < functionMessages.indexOf("finish1"));

      assertQ(chain + ": found deleted doc",
          req("q", "id:2")
          , "//result[@numFound=0]");
    }
  }

  /** Verifies scripts only fire when their configured request condition holds. */
  public void testConditionalExecution() throws Exception {
    for (String chain : new String[] {"conditional-script", "conditional-scripts"}) {

      ModifiableSolrParams reqParams = new ModifiableSolrParams();

      SolrInputDocument d = processAdd(chain,
          reqParams,
          doc(f("id", "3"),
              f("name", " foo "),
              f("subject", "bar")));

      assertFalse(chain + " added String field despite condition",
          d.containsKey("script_added_s"));
      assertFalse(chain + " added Double field despite condition",
          d.containsKey("script_added_d"));

      reqParams.add("go-for-it", "true");

      d = processAdd(chain,
          reqParams,
          doc(f("id", "4"),
              f("name", " foo "),
              f("subject", "bar")));

      assertEquals(chain + " didn't add String field",
          "i went for it", d.getFieldValue("script_added_s"));
      assertEquals(chain + " didn't add Double field",
          42.3d, d.getFieldValue("script_added_d"));
      assertEquals(chain + " didn't add integer field",
          Integer.valueOf(42), d.getFieldValue("script_added_i"));
    }
  }

  /** Verifies a chain can force a specific script engine by name. */
  public void testForceEngine() throws Exception {
    Assume.assumeNotNull((new ScriptEngineManager()).getEngineByName("javascript"));

    final String chain = "force-script-engine";
    SolrInputDocument d = processAdd(chain,
        doc(f("id", "5"),
            f("name", " foo "),
            f("subject", "bar")));

    assertEquals(chain + " didn't add Double field",
        42.3d, d.getFieldValue("script_added_d"));
    assertEquals(chain + " didn't add integer field",
        Integer.valueOf(42), d.getFieldValue("script_added_i"));
  }

  /** A script that throws should surface as a SolrException with its message. */
  public void testPropogatedException() throws Exception {
    final String chain = "error-on-add";
    try {
      processAdd(chain,
          doc(f("id", "5"),
              f("name", " foo "),
              f("subject", "bar")));
    } catch (SolrException e) {
      assertTrue("Exception doesn't contain script error string: " + e.getMessage(),
          0 < e.getMessage().indexOf("no-soup-fo-you"));
      return;
    }
    fail("Did not get exception from script");
  }

  /** A script missing the required functions should fail with a clear error. */
  public void testMissingFunctions() throws Exception {
    final String chain = "missing-functions";
    try {
      processAdd(chain,
          doc(f("id", "5"),
              f("name", " foo "),
              f("subject", "bar")));
    } catch (SolrException e) {
      assertTrue("Exception doesn't contain expected error: " + e.getMessage(),
          0 < e.getMessage().indexOf("processAdd"));
      return;
    }
    fail("Did not get exception from script");
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.elasticsearch;

import org.apache.calcite.jdbc.CalciteConnection;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.ViewTable;
import org.apache.calcite.test.CalciteAssert;
import org.apache.calcite.test.ElasticsearchChecker;
import org.apache.calcite.util.Bug;
import org.apache.calcite.util.TestUtil;

import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.LineProcessor;
import com.google.common.io.Resources;

import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.parallel.ResourceAccessMode;
import org.junit.jupiter.api.parallel.ResourceLock;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;

import static java.util.Objects.requireNonNull;

/**
 * Set of tests for ES adapter. Uses real instance via {@link EmbeddedElasticsearchPolicy}. Document
 * source is local {@code zips-mini.json} file (located in test classpath).
 */
@ResourceLock(value = "elasticsearch-scrolls", mode = ResourceAccessMode.READ)
class ElasticSearchAdapterTest {

  // shared embedded ES node for all tests in this class
  public static final EmbeddedElasticsearchPolicy NODE = EmbeddedElasticsearchPolicy.create();

  /** Default index/type name. */
  private static final String ZIPS = "zips";
  // total number of records loaded from zips-mini.json
  private static final int ZIPS_SIZE = 149;

  /**
   * Used to create {@code zips} index and insert zip data in bulk.
   * @throws Exception when instance setup failed
   */
  @BeforeAll
  public static void setupInstance() throws Exception {
    final Map<String, String> mapping =
        ImmutableMap.of("city", "keyword", "state", "keyword", "pop", "long");

    NODE.createIndex(ZIPS, mapping);

    // load records from file
    final List<ObjectNode> bulk = new ArrayList<>();
    Resources.readLines(ElasticSearchAdapterTest.class.getResource("/zips-mini.json"),
        StandardCharsets.UTF_8, new LineProcessor<Void>() {
          @Override public boolean processLine(String line) throws IOException {
            line = line.replace("_id", "id"); // _id is a reserved attribute in ES
            bulk.add((ObjectNode) NODE.mapper().readTree(line));
            return true;
          }

          @Override public Void getResult() {
            return null;
          }
        });

    if (bulk.isEmpty()) {
      throw new IllegalStateException("No records to index. Empty file ?");
    }

    NODE.insertBulk(ZIPS, bulk);
  }

  /**
   * Creates a fresh Calcite connection with the ES schema registered under
   * {@code elastic} plus a typed {@code zips} view over the raw index.
   */
  private static Connection createConnection() throws SQLException {
    final Connection connection =
        DriverManager.getConnection("jdbc:calcite:lex=JAVA");
    final SchemaPlus root =
        connection.unwrap(CalciteConnection.class).getRootSchema();

    root.add("elastic", new ElasticsearchSchema(NODE.restClient(), NODE.mapper(), ZIPS));

    // add calcite view programmatically
    final String viewSql = "select cast(_MAP['city'] AS varchar(20)) AS \"city\", "
        + " cast(_MAP['loc'][0] AS float) AS \"longitude\",\n"
        + " cast(_MAP['loc'][1] AS float) AS \"latitude\",\n"
        + " cast(_MAP['pop'] AS integer) AS \"pop\", "
        + " cast(_MAP['state'] AS varchar(2)) AS \"state\", "
        + " cast(_MAP['id'] AS varchar(5)) AS \"id\" "
        + "from \"elastic\".\"zips\"";

    root.add("zips",
        ViewTable.viewMacro(root, viewSql, Collections.singletonList("elastic"),
            Arrays.asList("elastic", "view"), false));

    return connection;
  }

  /** Fluent assertion helper bound to {@link #createConnection()}. */
  private CalciteAssert.AssertThat calciteAssert() {
    return CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection);
  }

  /** Tests using a Calcite view.
   */
  @Test void view() {
    calciteAssert()
        .query("select * from zips where city = 'BROOKLYN'")
        .returns("city=BROOKLYN; longitude=-73.956985; latitude=40.646694; "
            + "pop=111396; state=NY; id=11226\n")
        .returnsCount(1);
  }

  // empty result sets must work both with and without a pushed-down filter
  @Test void emptyResult() {
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from zips limit 0")
        .returnsCount(0);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where _MAP['Foo'] = '_MISSING_'")
        .returnsCount(0);
  }

  // basic filtering directly against the raw _MAP-based table
  @Test void basic() {
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        // by default elastic returns max 10 records
        .query("select * from elastic.zips")
        .runs();

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where _MAP['city'] = 'BROOKLYN'")
        .returnsCount(1);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where"
            + " _MAP['city'] in ('BROOKLYN', 'WASHINGTON')")
        .returnsCount(2);

    // lower-case
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where "
            + "_MAP['city'] in ('brooklyn', 'Brooklyn', 'BROOK') ")
        .returnsCount(0);

    // missing field
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where _MAP['CITY'] = 'BROOKLYN'")
        .returnsCount(0);

    // limit 0
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips limit 0")
        .returnsCount(0);
  }

  // ORDER BY must be pushed into an ElasticsearchSort node
  @Test void testSort() {
    final String explain = "PLAN=ElasticsearchToEnumerableConverter\n"
        + " ElasticsearchSort(sort0=[$4], dir0=[ASC])\n"
        + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n"
        + " ElasticsearchTableScan(table=[[elastic, zips]])";

    calciteAssert()
        .query("select * from zips order by state")
        .returnsCount(ZIPS_SIZE)
        .returns(sortedResultSetChecker("state", RelFieldCollation.Direction.ASCENDING))
        .explainContains(explain);
  }

  // OFFSET/FETCH must map onto the ES "from"/"size" request parameters
  @Test void testSortLimit() {
    final String sql = "select state, pop from zips\n"
        + "order by state, pop offset 2 rows fetch next 3 rows only";
    calciteAssert()
        .query(sql)
        .returnsUnordered("state=AK; pop=32383",
            "state=AL; pop=42124",
            "state=AL; pop=43862")
        .queryContains(
            ElasticsearchChecker.elasticsearchChecker(
                "'_source' : ['state', 'pop']",
                "sort: [ {state: 'asc'}, {pop: 'asc'}]",
                "from: 2",
                "size: 3"));
  }

  /**
   * Throws {@code AssertionError} if result set is not sorted by {@code column}.
   * {@code null}s are ignored.
   *
   * @param column column to be extracted (as comparable object).
   * @param direction ascending / descending
   * @return consumer which throws exception
   */
  private static Consumer<ResultSet> sortedResultSetChecker(String column,
      RelFieldCollation.Direction direction) {
    requireNonNull(column, "column");
    return rset -> {
      try {
        // collect the non-null column values in result order
        final List<Comparable<?>> states = new ArrayList<>();
        while (rset.next()) {
          Object object = rset.getObject(column);
          if (object != null && !(object instanceof Comparable)) {
            final String message = String.format(Locale.ROOT, "%s is not comparable", object);
            throw new IllegalStateException(message);
          }
          if (object != null) {
            //noinspection rawtypes
            states.add((Comparable) object);
          }
        }
        // verify each adjacent pair respects the requested direction
        for (int i = 0; i < states.size() - 1; i++) {
          //noinspection rawtypes
          final Comparable current = states.get(i);
          //noinspection rawtypes
          final Comparable next = states.get(i + 1);
          //noinspection unchecked
          final int cmp = current.compareTo(next);
          if (direction == RelFieldCollation.Direction.ASCENDING ? cmp > 0 : cmp < 0) {
            final String message = String.format(Locale.ROOT,
                "Column %s NOT sorted (%s): %s (index:%d) > %s (index:%d) count: %d",
                column, direction,
                current, i, next, i + 1, states.size());
            throw new AssertionError(message);
          }
        }
      } catch (SQLException e) {
        throw TestUtil.rethrow(e);
      }
    };
  }

  /**
   * Sorting (and aggregating) directly on items without a view.
   *
   * <p>Queries of type:
   * {@code select _MAP['a'] from elastic order by _MAP['b']}
   */
  @Test void testSortNoSchema() {
    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips order by _MAP['city']")
        .returnsCount(ZIPS_SIZE);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where _MAP['state'] = 'NY' order by _MAP['city']")
        .queryContains(
            ElasticsearchChecker.elasticsearchChecker(
                "query:{'constant_score':{filter:{term:{state:'NY'}}}}",
                "sort:[{city:'asc'}]",
                String.format(Locale.ROOT, "size:%s", ElasticsearchTransport.DEFAULT_FETCH_SIZE)))
        .returnsOrdered(
            "_MAP={id=11226, city=BROOKLYN, loc=[-73.956985, 40.646694], pop=111396, state=NY}",
            "_MAP={id=11373, city=JACKSON HEIGHTS, loc=[-73.878551, 40.740388], pop=88241, state=NY}",
            "_MAP={id=10021, city=NEW YORK, loc=[-73.958805, 40.768476], pop=106564, state=NY}");

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select _MAP['state'] from elastic.zips order by _MAP['city']")
        .returnsCount(ZIPS_SIZE);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select * from elastic.zips where _MAP['state'] = 'NY' or "
            + "_MAP['city'] = 'BROOKLYN'"
            + " order by _MAP['city']")
        .queryContains(
            ElasticsearchChecker.elasticsearchChecker(
                "query:{'dis_max':{'queries':[{'bool':{'should':"
                    + "[{'term':{'state':'NY'}},{'term':"
                    + "{'city':'BROOKLYN'}}]}}]}},'sort':[{'city':'asc'}]",
                String.format(Locale.ROOT, "size:%s", ElasticsearchTransport.DEFAULT_FETCH_SIZE)));

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select _MAP['city'] from elastic.zips where _MAP['state'] = 'NY' "
            + "order by _MAP['city']")
        .returnsOrdered("EXPR$0=BROOKLYN",
            "EXPR$0=JACKSON HEIGHTS",
            "EXPR$0=NEW YORK");

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select _MAP['city'] as city, _MAP['state'] from elastic.zips "
            + "order by _MAP['city'] asc")
        .returns(sortedResultSetChecker("city", RelFieldCollation.Direction.ASCENDING))
        .returnsCount(ZIPS_SIZE);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select _MAP['city'] as city, _MAP['state'] from elastic.zips "
            + "order by _MAP['city'] desc")
        .returns(sortedResultSetChecker("city", RelFieldCollation.Direction.DESCENDING))
        .returnsCount(ZIPS_SIZE);

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select max(_MAP['pop']), min(_MAP['pop']), _MAP['state'] from elastic.zips "
            + "group by _MAP['state'] order by _MAP['state'] limit 3")
        .returnsOrdered("EXPR$0=32383.0; EXPR$1=23238.0; EXPR$2=AK",
            "EXPR$0=44165.0; EXPR$1=42124.0; EXPR$2=AL",
            "EXPR$0=53532.0; EXPR$1=37428.0; EXPR$2=AR");

    CalciteAssert.that()
        .with(ElasticSearchAdapterTest::createConnection)
        .query("select max(_MAP['pop']), min(_MAP['pop']), _MAP['state'] from elastic.zips "
            + "where _MAP['state'] = 'NY' group by _MAP['state'] order by _MAP['state'] limit 3")
        .returns("EXPR$0=111396.0; EXPR$1=88241.0; EXPR$2=NY\n");
  }

  /** Tests sorting by multiple fields (in different direction: asc/desc).
*/ @Test void sortAscDesc() { final String sql = "select city, state, pop from zips\n" + "order by pop desc, state asc, city desc limit 3"; calciteAssert() .query(sql) .returnsOrdered("city=CHICAGO; state=IL; pop=112047", "city=BROOKLYN; state=NY; pop=111396", "city=NEW YORK; state=NY; pop=106564") .queryContains( ElasticsearchChecker.elasticsearchChecker( "'_source':['city','state','pop']", "sort:[{pop:'desc'}, {state:'asc'}, {city:'desc'}]", "size:3")); } @Test void testOffsetLimit() { final String sql = "select state, id from zips\n" + "offset 2 fetch next 3 rows only"; calciteAssert() .query(sql) .runs() .returnsCount(3) .queryContains( ElasticsearchChecker.elasticsearchChecker( "_source : ['state', 'id']", "from: 2", "size: 3")); } @Test void testLimit() { final String sql = "select state, id from zips\n" + "fetch next 3 rows only"; calciteAssert() .query(sql) .runs() .returnsCount(3) .queryContains( ElasticsearchChecker.elasticsearchChecker( "'_source':['state','id']", "size:3")); } @Test void limit2() { final String sql = "select id from zips limit 5"; calciteAssert() .query(sql) .runs() .returnsCount(5) .queryContains( ElasticsearchChecker.elasticsearchChecker( "'_source':['id']", "size:5")); } @Test void testFilterSort() { final String sql = "select * from zips\n" + "where state = 'CA' and pop >= 94000\n" + "order by state, pop"; final String explain = "PLAN=ElasticsearchToEnumerableConverter\n" + " ElasticsearchSort(sort0=[$4], sort1=[$3], dir0=[ASC], dir1=[ASC])\n" + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n" + " ElasticsearchFilter(condition=[AND(=(CAST(ITEM($0, 'state')):VARCHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n" + " ElasticsearchTableScan(table=[[elastic, zips]])\n\n"; calciteAssert() 
.query(sql) .returnsOrdered("city=NORWALK; longitude=-118.081767; latitude=33.90564;" + " pop=94188; state=CA; id=90650", "city=LOS ANGELES; longitude=-118.258189; latitude=34.007856;" + " pop=96074; state=CA; id=90011", "city=BELL GARDENS; longitude=-118.17205; latitude=33.969177;" + " pop=99568; state=CA; id=90201") .queryContains( ElasticsearchChecker.elasticsearchChecker("'query' : " + "{'constant_score':{filter:{bool:" + "{must:[{term:{state:'CA'}}," + "{range:{pop:{gte:94000}}}]}}}}", "'script_fields': {longitude:{script:'params._source.loc[0]'}, " + "latitude:{script:'params._source.loc[1]'}, " + "city:{script: 'params._source.city'}, " + "pop:{script: 'params._source.pop'}, " + "state:{script: 'params._source.state'}, " + "id:{script: 'params._source.id'}}", "sort: [ {state: 'asc'}, {pop: 'asc'}]", String.format(Locale.ROOT, "size:%s", ElasticsearchTransport.DEFAULT_FETCH_SIZE))) .explainContains(explain); } @Test void testDismaxQuery() { final String sql = "select * from zips\n" + "where state = 'CA' or pop >= 94000\n" + "order by state, pop"; final String explain = "PLAN=ElasticsearchToEnumerableConverter\n" + " ElasticsearchSort(sort0=[$4], sort1=[$3], dir0=[ASC], dir1=[ASC])\n" + " ElasticsearchProject(city=[CAST(ITEM($0, 'city')):VARCHAR(20)], longitude=[CAST(ITEM(ITEM($0, 'loc'), 0)):FLOAT], latitude=[CAST(ITEM(ITEM($0, 'loc'), 1)):FLOAT], pop=[CAST(ITEM($0, 'pop')):INTEGER], state=[CAST(ITEM($0, 'state')):VARCHAR(2)], id=[CAST(ITEM($0, 'id')):VARCHAR(5)])\n" + " ElasticsearchFilter(condition=[OR(=(CAST(ITEM($0, 'state')):VARCHAR(2), 'CA'), >=(CAST(ITEM($0, 'pop')):INTEGER, 94000))])\n" + " ElasticsearchTableScan(table=[[elastic, zips]])\n\n"; calciteAssert() .query(sql) .queryContains( ElasticsearchChecker.elasticsearchChecker("'query' : " + "{'dis_max':{'queries':[{bool:" + "{should:[{term:{state:'CA'}}," + "{range:{pop:{gte:94000}}}]}}]}}", "'script_fields': {longitude:{script:'params._source.loc[0]'}, " + 
"latitude:{script:'params._source.loc[1]'}, " + "city:{script: 'params._source.city'}, " + "pop:{script: 'params._source.pop'}, " + "state:{script: 'params._source.state'}, " + "id:{script: 'params._source.id'}}", "sort: [ {state: 'asc'}, {pop: 'asc'}]", String.format(Locale.ROOT, "size:%s", ElasticsearchTransport.DEFAULT_FETCH_SIZE))) .explainContains(explain); } @Test void testFilterSortDesc() { Assumptions.assumeTrue(Bug.CALCITE_4645_FIXED, "CALCITE-4645"); final String sql = "select * from zips\n" + "where pop BETWEEN 95000 AND 100000\n" + "order by state desc, pop"; calciteAssert() .query(sql) .limit(4) .returnsOrdered( "city=LOS ANGELES; longitude=-118.258189; latitude=34.007856; pop=96074; state=CA; id=90011", "city=BELL GARDENS; longitude=-118.17205; latitude=33.969177; pop=99568; state=CA; id=90201"); } @Test void testInPlan() { final String[] searches = { "query: {'constant_score':{filter:{terms:{pop:" + "[96074, 99568]}}}}", "script_fields: {longitude:{script:'params._source.loc[0]'}, " + "latitude:{script:'params._source.loc[1]'}, " + "city:{script: 'params._source.city'}, " + "pop:{script: 'params._source.pop'}, " + "state:{script: 'params._source.state'}, " + "id:{script: 'params._source.id'}}", String.format(Locale.ROOT, "size:%d", ElasticsearchTransport.DEFAULT_FETCH_SIZE) }; calciteAssert() .query("select * from zips where pop in (96074, 99568)") .returnsUnordered( "city=BELL GARDENS; longitude=-118.17205; latitude=33.969177; pop=99568; state=CA; id=90201", "city=LOS ANGELES; longitude=-118.258189; latitude=34.007856; pop=96074; state=CA; id=90011") .queryContains(ElasticsearchChecker.elasticsearchChecker(searches)); } @Test void testZips() { calciteAssert() .query("select state, city from zips") .returnsCount(ZIPS_SIZE); } @Test void testProject() { final String sql = "select state, city, 0 as zero\n" + "from zips\n" + "order by state, city"; calciteAssert() .query(sql) .limit(2) .returnsUnordered("state=AK; city=ANCHORAGE; zero=0", "state=AK; 
city=FAIRBANKS; zero=0") .queryContains( ElasticsearchChecker.elasticsearchChecker("script_fields:" + "{zero:{script:'0'}," + "state:{script:'params._source.state'}," + "city:{script:'params._source.city'}}", "sort:[{state:'asc'},{city:'asc'}]", String.format(Locale.ROOT, "size:%d", ElasticsearchTransport.DEFAULT_FETCH_SIZE))); } @Test void testFilter() { final String explain = "PLAN=ElasticsearchToEnumerableConverter\n" + " ElasticsearchProject(state=[CAST(ITEM($0, 'state')):VARCHAR(2)], city=[CAST(ITEM($0, 'city')):VARCHAR(20)])\n" + " ElasticsearchFilter(condition=[=(CAST(ITEM($0, 'state')):VARCHAR(2), 'CA')])\n" + " ElasticsearchTableScan(table=[[elastic, zips]])"; calciteAssert() .query("select state, city from zips where state = 'CA'") .limit(3) .returnsUnordered("state=CA; city=BELL GARDENS", "state=CA; city=LOS ANGELES", "state=CA; city=NORWALK") .explainContains(explain); } @Test void testFilterReversed() { calciteAssert() .query("select state, city from zips where 'WI' < state order by city") .limit(2) .returnsUnordered("state=WV; city=BECKLEY", "state=WY; city=CHEYENNE"); calciteAssert() .query("select state, city from zips where state > 'WI' order by city") .limit(2) .returnsUnordered("state=WV; city=BECKLEY", "state=WY; city=CHEYENNE"); } @Test void agg1() { calciteAssert() .query("select count(*) from zips") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "track_total_hits:true")) .returns("EXPR$0=149\n"); // check with limit (should still return correct result). 
calciteAssert() .query("select count(*) from zips limit 1") .returns("EXPR$0=149\n"); calciteAssert() .query("select count(*) as cnt from zips") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "'stored_fields': '_none_'", "size:0", "track_total_hits:true")) .returns("cnt=149\n"); calciteAssert() .query("select min(pop), max(pop) from zips") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "track_total_hits:true", "'stored_fields': '_none_'", "aggregations:{'EXPR$0':{min:{field:'pop'}},'EXPR$1':{max:" + "{field:'pop'}}}")) .returns("EXPR$0=21; EXPR$1=112047\n"); calciteAssert() .query("select min(pop) as min1, max(pop) as max1 from zips") .returns("min1=21; max1=112047\n"); calciteAssert() .query("select count(*), max(pop), min(pop), sum(pop), avg(pop) from zips") .returns("EXPR$0=149; EXPR$1=112047; EXPR$2=21; EXPR$3=7865489; EXPR$4=52788\n"); } @Test void groupBy() { // distinct calciteAssert() .query("select distinct state\n" + "from zips\n" + "limit 6") .queryContains( ElasticsearchChecker.elasticsearchChecker("_source:false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_state':{'terms':{'field':'state','missing':'__MISSING__', 'size' : 6}}}")) .returnsOrdered("state=AK", "state=AL", "state=AR", "state=AZ", "state=CA", "state=CO"); // without aggregate function calciteAssert() .query("select state, city\n" + "from zips\n" + "group by state, city\n" + "order by city limit 10") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_city':{'terms':{'field':'city','missing':'__MISSING__','size':10,'order':{'_key':'asc'}}", "aggregations:{'g_state':{'terms':{'field':'state','missing':'__MISSING__','size':10}}}}}}")) .returnsOrdered("state=SD; city=ABERDEEN", "state=SC; city=AIKEN", "state=TX; city=ALTON", "state=IA; city=AMES", "state=AK; city=ANCHORAGE", "state=MD; city=BALTIMORE", "state=ME; city=BANGOR", 
"state=KS; city=BAVARIA", "state=NJ; city=BAYONNE", "state=OR; city=BEAVERTON"); // ascending calciteAssert() .query("select min(pop), max(pop), state\n" + "from zips\n" + "group by state\n" + "order by state limit 3") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_state':{terms:{field:'state',missing:'__MISSING__',size:3," + " order:{'_key':'asc'}}", "aggregations:{'EXPR$0':{min:{field:'pop'}},'EXPR$1':{max:{field:'pop'}}}}}")) .returnsOrdered("EXPR$0=23238; EXPR$1=32383; state=AK", "EXPR$0=42124; EXPR$1=44165; state=AL", "EXPR$0=37428; EXPR$1=53532; state=AR"); // just one aggregation function calciteAssert() .query("select min(pop), state\n" + "from zips\n" + "group by state\n" + "order by state limit 3") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_state':{terms:{field:'state',missing:'__MISSING__'," + "size:3, order:{'_key':'asc'}}", "aggregations:{'EXPR$0':{min:{field:'pop'}} }}}")) .returnsOrdered("EXPR$0=23238; state=AK", "EXPR$0=42124; state=AL", "EXPR$0=37428; state=AR"); // group by count calciteAssert() .query("select count(city), state\n" + "from zips\n" + "group by state\n" + "order by state limit 3") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_state':{terms:{field:'state',missing:'__MISSING__'," + " size:3, order:{'_key':'asc'}}", "aggregations:{'EXPR$0':{'value_count':{field:'city'}} }}}")) .returnsOrdered("EXPR$0=3; state=AK", "EXPR$0=3; state=AL", "EXPR$0=3; state=AR"); // descending calciteAssert() .query("select min(pop), max(pop), state\n" + "from zips\n" + "group by state\n" + "order by state desc limit 3") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", 
"aggregations:{'g_state':{terms:{field:'state',missing:'__MISSING__'," + "size:3, order:{'_key':'desc'}}", "aggregations:{'EXPR$0':{min:{field:'pop'}},'EXPR$1':" + "{max:{field:'pop'}}}}}")) .returnsOrdered("EXPR$0=25968; EXPR$1=33107; state=WY", "EXPR$0=45196; EXPR$1=70185; state=WV", "EXPR$0=51008; EXPR$1=57187; state=WI"); } /** Tests the {@code NOT} operator. */ @Test void notOperator() { // largest zips (states) in mini-zip by pop (sorted) : IL, NY, CA, MI calciteAssert() .query("select count(*), max(pop) from zips where state not in ('IL')") .returns("EXPR$0=146; EXPR$1=111396\n"); calciteAssert() .query("select count(*), max(pop) from zips where not state in ('IL')") .returns("EXPR$0=146; EXPR$1=111396\n"); calciteAssert() .query("select count(*), max(pop) from zips where not state not in ('IL')") .returns("EXPR$0=3; EXPR$1=112047\n"); calciteAssert() .query("select count(*), max(pop) from zips where state not in ('IL', 'NY')") .returns("EXPR$0=143; EXPR$1=99568\n"); calciteAssert() .query("select count(*), max(pop) from zips where state not in ('IL', 'NY', 'CA')") .returns("EXPR$0=140; EXPR$1=84712\n"); } /** * Test of {@link org.apache.calcite.sql.fun.SqlStdOperatorTable#APPROX_COUNT_DISTINCT} which * will be translated to * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-cardinality-aggregation.html">Cardinality Aggregation</a> * (approximate counts using HyperLogLog++ algorithm). 
*/ @Test void approximateCount() { calciteAssert() .query("select state, approx_count_distinct(city), approx_count_distinct(pop) from zips" + " group by state order by state limit 3") .queryContains( ElasticsearchChecker.elasticsearchChecker("'_source':false", "size:0", "'stored_fields': '_none_'", "aggregations:{'g_state':{terms:{field:'state', missing:'__MISSING__', size:3, " + "order:{'_key':'asc'}}", "aggregations:{'EXPR$1':{cardinality:{field:'city'}}", "'EXPR$2':{cardinality:{field:'pop'}} " + " }}}")) .returnsOrdered("state=AK; EXPR$1=3; EXPR$2=3", "state=AL; EXPR$1=3; EXPR$2=3", "state=AR; EXPR$1=3; EXPR$2=3"); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.commons.compress.compressors;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream;
import org.apache.commons.compress.compressors.deflate.DeflateCompressorInputStream;
import org.apache.commons.compress.compressors.deflate.DeflateCompressorOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.compress.compressors.lzma.LZMACompressorInputStream;
import org.apache.commons.compress.compressors.lzma.LZMAUtils;
import org.apache.commons.compress.compressors.xz.XZCompressorInputStream;
import org.apache.commons.compress.compressors.xz.XZCompressorOutputStream;
import org.apache.commons.compress.compressors.xz.XZUtils;
import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream;
import org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream;
import org.apache.commons.compress.compressors.snappy.FramedSnappyCompressorInputStream;
import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream;
import org.apache.commons.compress.compressors.z.ZCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;

/**
 * <p>Factory to create Compressor[In|Out]putStreams from names. To add other
 * implementations you should extend CompressorStreamFactory and override the
 * appropriate methods (and call their implementation from super of course).</p>
 *
 * Example (Compressing a file):
 *
 * <pre>
 * final OutputStream out = new FileOutputStream(output);
 * CompressorOutputStream cos =
 *      new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
 * IOUtils.copy(new FileInputStream(input), cos);
 * cos.close();
 * </pre>
 *
 * Example (Decompressing a file):
 * <pre>
 * final InputStream is = new FileInputStream(input);
 * CompressorInputStream in =
 *      new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.BZIP2, is);
 * IOUtils.copy(in, new FileOutputStream(output));
 * in.close();
 * </pre>
 *
 * @Immutable provided that the deprecated method setDecompressConcatenated is not used.
 * @ThreadSafe even if the deprecated method setDecompressConcatenated is used
 */
public class CompressorStreamFactory {

    /**
     * Constant (value {@value}) used to identify the BZIP2 compression algorithm.
     * @since 1.1
     */
    public static final String BZIP2 = "bzip2";

    /**
     * Constant (value {@value}) used to identify the GZIP compression algorithm.
     * @since 1.1
     */
    public static final String GZIP = "gz";

    /**
     * Constant (value {@value}) used to identify the PACK200 compression algorithm.
     * @since 1.3
     */
    public static final String PACK200 = "pack200";

    /**
     * Constant (value {@value}) used to identify the XZ compression method.
     * @since 1.4
     */
    public static final String XZ = "xz";

    /**
     * Constant (value {@value}) used to identify the LZMA compression method.
     * Not supported as an output stream type.
     * @since 1.6
     */
    public static final String LZMA = "lzma";

    /**
     * Constant (value {@value}) used to identify the "framed" Snappy compression method.
     * Not supported as an output stream type.
     * @since 1.7
     */
    public static final String SNAPPY_FRAMED = "snappy-framed";

    /**
     * Constant (value {@value}) used to identify the "raw" Snappy compression method.
     * Not supported as an output stream type.
     * @since 1.7
     */
    public static final String SNAPPY_RAW = "snappy-raw";

    /**
     * Constant (value {@value}) used to identify the traditional Unix compress method.
     * Not supported as an output stream type.
     * @since 1.7
     */
    public static final String Z = "z";

    /**
     * Constant (value {@value}) used to identify the Deflate compress method.
     * @since 1.9
     */
    public static final String DEFLATE = "deflate";

    /**
     * If true, decompress until the end of the input.
     * If false, stop after the first stream and leave the
     * input position to point to the next byte after the stream
     */
    private final Boolean decompressUntilEOF;
    // This is Boolean so setDecompressConcatenated can determine whether it has been set by the ctor
    // once the setDecompressConcatenated method has been removed, it can revert to boolean

    /**
     * If true, decompress until the end of the input.
     * If false, stop after the first stream and leave the
     * input position to point to the next byte after the stream
     */
    private volatile boolean decompressConcatenated = false;

    /**
     * Create an instance with the decompress Concatenated option set to false.
     */
    public CompressorStreamFactory() {
        // null marks "not fixed by constructor" so setDecompressConcatenated stays usable
        this.decompressUntilEOF = null;
    }

    /**
     * Create an instance with the provided decompress Concatenated option.
     * @param decompressUntilEOF
     *            if true, decompress until the end of the
     *            input; if false, stop after the first
     *            stream and leave the input position to point
     *            to the next byte after the stream.
     *            This setting applies to the gzip, bzip2 and xz formats only.
     * @since 1.10
     */
    public CompressorStreamFactory(final boolean decompressUntilEOF) {
        this.decompressUntilEOF = Boolean.valueOf(decompressUntilEOF);
        // Also copy to existing variable so can continue to use that as the current value
        this.decompressConcatenated = decompressUntilEOF;
    }

    /**
     * Whether to decompress the full input or only the first stream
     * in formats supporting multiple concatenated input streams.
     *
     * <p>This setting applies to the gzip, bzip2 and xz formats only.</p>
     *
     * @param decompressConcatenated
     *            if true, decompress until the end of the
     *            input; if false, stop after the first
     *            stream and leave the input position to point
     *            to the next byte after the stream
     * @since 1.5
     * @deprecated 1.10 use the {@link #CompressorStreamFactory(boolean)} constructor instead
     * @throws IllegalStateException if the constructor {@link #CompressorStreamFactory(boolean)}
     * was used to create the factory
     */
    @Deprecated
    public void setDecompressConcatenated(final boolean decompressConcatenated) {
        if (this.decompressUntilEOF != null) {
            throw new IllegalStateException("Cannot override the setting defined by the constructor");
        }
        this.decompressConcatenated = decompressConcatenated;
    }

    /**
     * Create a compressor input stream from an input stream, autodetecting
     * the compressor type from the first few bytes of the stream. The InputStream
     * must support marks, like BufferedInputStream.
     *
     * @param in the input stream
     * @return the compressor input stream
     * @throws CompressorException if no compressor is found for the stream signature,
     * or if reading the signature fails
     * @throws IllegalArgumentException if the stream is null or does not support mark
     * @since 1.1
     */
    public CompressorInputStream createCompressorInputStream(final InputStream in)
            throws CompressorException {
        if (in == null) {
            throw new IllegalArgumentException("Stream must not be null.");
        }

        if (!in.markSupported()) {
            throw new IllegalArgumentException("Mark is not supported.");
        }

        // Peek at the first bytes, then rewind so the chosen stream reads from the start.
        final byte[] signature = new byte[12];
        in.mark(signature.length);
        try {
            final int signatureLength = IOUtils.readFully(in, signature);
            in.reset();

            if (BZip2CompressorInputStream.matches(signature, signatureLength)) {
                return new BZip2CompressorInputStream(in, decompressConcatenated);
            }

            if (GzipCompressorInputStream.matches(signature, signatureLength)) {
                return new GzipCompressorInputStream(in, decompressConcatenated);
            }

            if (Pack200CompressorInputStream.matches(signature, signatureLength)) {
                return new Pack200CompressorInputStream(in);
            }

            if (FramedSnappyCompressorInputStream.matches(signature, signatureLength)) {
                return new FramedSnappyCompressorInputStream(in);
            }

            if (ZCompressorInputStream.matches(signature, signatureLength)) {
                return new ZCompressorInputStream(in);
            }

            if (DeflateCompressorInputStream.matches(signature, signatureLength)) {
                return new DeflateCompressorInputStream(in);
            }

            // XZ and LZMA detection additionally requires the optional XZ library
            // to be present on the classpath, hence the availability checks.
            if (XZUtils.matches(signature, signatureLength) &&
                    XZUtils.isXZCompressionAvailable()) {
                return new XZCompressorInputStream(in, decompressConcatenated);
            }

            if (LZMAUtils.matches(signature, signatureLength) &&
                    LZMAUtils.isLZMACompressionAvailable()) {
                return new LZMACompressorInputStream(in);
            }

        } catch (final IOException e) {
            throw new CompressorException("Failed to detect Compressor from InputStream.", e);
        }

        throw new CompressorException("No Compressor found for the stream signature.");
    }

    /**
     * Create a compressor input stream from a compressor name and an input stream.
     *
     * @param name of the compressor,
     * i.e. {@value #GZIP}, {@value #BZIP2}, {@value #XZ}, {@value #LZMA},
     * {@value #PACK200}, {@value #SNAPPY_RAW}, {@value #SNAPPY_FRAMED},
     * {@value #Z} or {@value #DEFLATE}
     * @param in the input stream
     * @return compressor input stream
     * @throws CompressorException if the compressor name is not known
     * @throws IllegalArgumentException if the name or input stream is null
     */
    public CompressorInputStream createCompressorInputStream(final String name,
            final InputStream in) throws CompressorException {
        if (name == null || in == null) {
            throw new IllegalArgumentException(
                    "Compressor name and stream must not be null.");
        }

        try {

            if (GZIP.equalsIgnoreCase(name)) {
                return new GzipCompressorInputStream(in, decompressConcatenated);
            }

            if (BZIP2.equalsIgnoreCase(name)) {
                return new BZip2CompressorInputStream(in, decompressConcatenated);
            }

            if (XZ.equalsIgnoreCase(name)) {
                return new XZCompressorInputStream(in, decompressConcatenated);
            }

            if (LZMA.equalsIgnoreCase(name)) {
                return new LZMACompressorInputStream(in);
            }

            if (PACK200.equalsIgnoreCase(name)) {
                return new Pack200CompressorInputStream(in);
            }

            if (SNAPPY_RAW.equalsIgnoreCase(name)) {
                return new SnappyCompressorInputStream(in);
            }

            if (SNAPPY_FRAMED.equalsIgnoreCase(name)) {
                return new FramedSnappyCompressorInputStream(in);
            }

            if (Z.equalsIgnoreCase(name)) {
                return new ZCompressorInputStream(in);
            }

            if (DEFLATE.equalsIgnoreCase(name)) {
                return new DeflateCompressorInputStream(in);
            }

        } catch (final IOException e) {
            throw new CompressorException(
                    "Could not create CompressorInputStream.", e);
        }
        throw new CompressorException("Compressor: " + name + " not found.");
    }

    /**
     * Create a compressor output stream from a compressor name and an output stream.
     *
     * @param name the compressor name,
     * i.e. {@value #GZIP}, {@value #BZIP2}, {@value #XZ},
     * {@value #PACK200} or {@value #DEFLATE}
     * @param out the output stream
     * @return the compressor output stream
     * @throws CompressorException if the compressor name is not known
     * @throws IllegalArgumentException if the compressor name or stream is null
     */
    public CompressorOutputStream createCompressorOutputStream(
            final String name, final OutputStream out)
            throws CompressorException {
        if (name == null || out == null) {
            throw new IllegalArgumentException(
                    "Compressor name and stream must not be null.");
        }

        try {

            if (GZIP.equalsIgnoreCase(name)) {
                return new GzipCompressorOutputStream(out);
            }

            if (BZIP2.equalsIgnoreCase(name)) {
                return new BZip2CompressorOutputStream(out);
            }

            if (XZ.equalsIgnoreCase(name)) {
                return new XZCompressorOutputStream(out);
            }

            if (PACK200.equalsIgnoreCase(name)) {
                return new Pack200CompressorOutputStream(out);
            }

            if (DEFLATE.equalsIgnoreCase(name)) {
                return new DeflateCompressorOutputStream(out);
            }

        } catch (final IOException e) {
            throw new CompressorException(
                    "Could not create CompressorOutputStream", e);
        }
        throw new CompressorException("Compressor: " + name + " not found.");
    }

    // For Unit tests
    boolean getDecompressConcatenated() {
        return decompressConcatenated;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.adapter.elasticsearch;

import org.apache.calcite.adapter.java.AbstractQueryableTable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;
import org.apache.calcite.linq4j.Linq4j;
import org.apache.calcite.linq4j.QueryProvider;
import org.apache.calcite.linq4j.Queryable;
import org.apache.calcite.linq4j.function.Function1;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.TranslatableTable;
import org.apache.calcite.schema.impl.AbstractTableQueryable;
import org.apache.calcite.sql.type.SqlTypeName;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.collect.ImmutableMap;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Collectors;

/**
 * Table based on an Elasticsearch index.
 *
 * <p>Rows are exposed to Calcite as a single {@code _MAP} column (see
 * {@link #getRowType(RelDataTypeFactory)}). Query execution happens through
 * {@link #find}, which is invoked via generated code from
 * {@link ElasticsearchQueryable#find}, not through the standard
 * {@link Queryable#enumerator()} path.
 */
public class ElasticsearchTable extends AbstractQueryableTable implements TranslatableTable {

  /**
   * Used for constructing (possibly nested) Elastic aggregation nodes.
   */
  private static final String AGGREGATIONS = "aggregations";

  // Runtime version of the Elastic cluster; drives version-specific behavior
  // (scripted-field prefix, track_total_hits).
  private final ElasticsearchVersion version;
  // Name of the index this table maps to.
  private final String indexName;
  // Shared Jackson mapper used to build/parse the query JSON.
  final ObjectMapper mapper;
  // Low-level HTTP transport to the Elastic cluster.
  final ElasticsearchTransport transport;

  /**
   * Creates an ElasticsearchTable.
   *
   * <p>Version, index name and mapper are all derived from the supplied
   * (non-null) transport.
   */
  ElasticsearchTable(ElasticsearchTransport transport) {
    super(Object[].class);
    this.transport = Objects.requireNonNull(transport, "transport");
    this.version = transport.version;
    this.indexName = transport.indexName;
    this.mapper = transport.mapper();
  }

  /**
   * In ES 5.x scripted fields start with {@code params._source.foo} while in ES2.x
   * {@code _source.foo}. Helper method to build correct query based on runtime version of elastic.
   * Used to keep backwards compatibility with ES2.
   *
   * @see <a href="https://github.com/elastic/elasticsearch/issues/20068">_source variable</a>
   * @see <a href="https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting-fields.html">Scripted Fields</a>
   * @return string to be used for scripted fields
   */
  String scriptedFieldPrefix() {
    // ES2 vs ES5 scripted field difference
    return version == ElasticsearchVersion.ES2
        ? ElasticsearchConstants.SOURCE_GROOVY
        : ElasticsearchConstants.SOURCE_PAINLESS;
  }

  /**
   * Executes a "find" operation on the underlying index.
   *
   * <p>Delegates to {@link #aggregate} as soon as any aggregation or GROUP BY
   * expression is present; otherwise builds a plain search request.
   *
   * @param ops List of operations represented as Json strings.
   * @param fields List of fields to project; or null to return map
   * @param sort list of fields to sort and their direction (asc/desc)
   * @param groupBy list of fields to group by (triggers the aggregate path)
   * @param aggregations aggregation functions
   * @param mappings mapping from output (alias) names to Elastic field names
   * @param offset row offset ({@code from} in Elastic terms), or null
   * @param fetch maximum number of rows ({@code size}), or null
   * @return Enumerator of results
   * @throws IOException if the query JSON cannot be parsed or the search fails
   */
  private Enumerable<Object> find(List<String> ops,
      List<Map.Entry<String, Class>> fields,
      List<Map.Entry<String, RelFieldCollation.Direction>> sort,
      List<String> groupBy,
      List<Map.Entry<String, String>> aggregations,
      Map<String, String> mappings,
      Long offset, Long fetch) throws IOException {

    if (!aggregations.isEmpty() || !groupBy.isEmpty()) {
      // process aggregations separately
      return aggregate(ops, fields, sort, groupBy, aggregations, mappings, offset, fetch);
    }

    final ObjectNode query = mapper.createObjectNode();
    // manually parse from previously concatenated string; later ops overwrite
    // earlier keys via setAll
    for (String op: ops) {
      query.setAll((ObjectNode) mapper.readTree(op));
    }

    if (!sort.isEmpty()) {
      // translate each collation into a {"field": "asc"|"desc"} entry
      ArrayNode sortNode = query.withArray("sort");
      sort.forEach(e ->
          sortNode.add(
              mapper.createObjectNode().put(e.getKey(),
                  e.getValue().isDescending() ? "desc" : "asc")));
    }

    if (offset != null) {
      query.put("from", offset);
    }

    if (fetch != null) {
      query.put("size", fetch);
    }

    final Function1<ElasticsearchJson.SearchHit, Object> getter =
        ElasticsearchEnumerators.getter(fields, ImmutableMap.copyOf(mappings));

    Iterable<ElasticsearchJson.SearchHit> iter;
    if (offset == null) {
      // apply scrolling when there is no offsets
      iter = () -> new Scrolling(transport).query(query);
    } else {
      // with an explicit offset a single (non-scrolling) search is issued
      final ElasticsearchJson.Result search = transport.search().apply(query);
      iter = () -> search.searchHits().hits().iterator();
    }

    return Linq4j.asEnumerable(iter).select(getter);
  }

  /**
   * Executes a search with aggregations (and/or GROUP BY) and converts the
   * aggregation tree back into flat row maps.
   *
   * @param ops list of query operations as JSON strings (concatenated into one query)
   * @param fields list of fields to project; or null to return map
   * @param sort list of fields to sort and their direction (asc/desc)
   * @param groupBy GROUP BY expressions; each becomes a nested {@code terms} aggregation
   * @param aggregations aggregation functions keyed by output expression name
   * @param mapping mapping from output (alias) names to Elastic field names
   * @param offset row offset; must be null when {@code groupBy} is non-empty (ES limitation)
   * @param fetch maximum number of buckets/rows, or null
   * @return enumerable of result rows
   * @throws IOException if query JSON parsing or the search request fails
   * @throws IllegalStateException when OFFSET is combined with GROUP BY
   */
  private Enumerable<Object> aggregate(List<String> ops,
      List<Map.Entry<String, Class>> fields,
      List<Map.Entry<String, RelFieldCollation.Direction>> sort,
      List<String> groupBy,
      List<Map.Entry<String, String>> aggregations,
      Map<String, String> mapping,
      Long offset, Long fetch) throws IOException {

    if (!groupBy.isEmpty() && offset != null) {
      String message = "Currently ES doesn't support generic pagination "
          + "with aggregations. You can still use LIMIT keyword (without OFFSET). "
          + "For more details see https://github.com/elastic/elasticsearch/issues/4915";
      throw new IllegalStateException(message);
    }

    final ObjectNode query = mapper.createObjectNode();
    // manually parse into JSON from previously concatenated strings
    for (String op: ops) {
      query.setAll((ObjectNode) mapper.readTree(op));
    }

    // remove / override attributes which are not applicable to aggregations
    query.put("_source", false);
    query.put("size", 0);
    query.remove("script_fields");
    // set _source = false and size = 0, `FetchPhase` would still be executed
    // to fetch the metadata fields and visit the Lucene stored_fields,
    // which would lead to performance declined dramatically.
    // `stored_fields = _none` can prohibit such behavior entirely
    query.put("stored_fields", "_none_");

    // allows to detect aggregation for count(*): such expressions reference
    // the synthetic _id field in their JSON
    final Predicate<Map.Entry<String, String>> isCountStar = e -> e.getValue()
        .contains("\"" + ElasticsearchConstants.ID + "\"");

    // list of expressions which are count(*)
    final Set<String> countAll = aggregations.stream()
        .filter(isCountStar)
        .map(Map.Entry::getKey).collect(Collectors.toSet());

    // maps generated aggregation names ("g_<field>") back to output field names
    final Map<String, String> fieldMap = new HashMap<>();

    // due to ES aggregation format. fields in "order by" clause should go first
    // if "order by" is missing. order in "group by" is un-important
    final Set<String> orderedGroupBy = new LinkedHashSet<>();
    orderedGroupBy.addAll(sort.stream().map(Map.Entry::getKey).collect(Collectors.toList()));
    orderedGroupBy.addAll(groupBy);

    // construct nested aggregations node(s): each group-by field becomes a
    // "terms" aggregation nested one level deeper than the previous one
    ObjectNode parent = query.with(AGGREGATIONS);
    for (String name: orderedGroupBy) {
      final String aggName = "g_" + name;
      fieldMap.put(aggName, name);

      final ObjectNode section = parent.with(aggName);
      final ObjectNode terms = section.with("terms");
      terms.put("field", name);

      transport.mapping.missingValueFor(name).ifPresent(m -> {
        // expose missing terms. each type has a different missing value
        terms.set("missing", m);
      });

      if (fetch != null) {
        terms.put("size", fetch);
      }

      // if this field is sorted on, order the buckets by key accordingly
      sort.stream().filter(e -> e.getKey().equals(name)).findAny()
          .ifPresent(s -> terms.with("order")
              .put("_key", s.getValue().isDescending() ? "desc" : "asc"));

      // descend: subsequent group-by fields (and metric aggs) nest inside
      parent = section.with(AGGREGATIONS);
    }

    // simple version for queries like "select count(*), max(col1) from table" (no GROUP BY cols)
    if (!groupBy.isEmpty() || !aggregations.stream().allMatch(isCountStar)) {
      for (Map.Entry<String, String> aggregation : aggregations) {
        JsonNode value = mapper.readTree(aggregation.getValue());
        parent.set(aggregation.getKey(), value);
      }
    }

    // recursively strips empty "aggregations" objects left over by the
    // nesting loop above (an empty node would be rejected by Elastic)
    final Consumer<JsonNode> emptyAggRemover = new Consumer<JsonNode>() {
      @Override public void accept(JsonNode node) {
        if (!node.has(AGGREGATIONS)) {
          node.elements().forEachRemaining(this);
          return;
        }
        JsonNode agg = node.get(AGGREGATIONS);
        if (agg.size() == 0) {
          ((ObjectNode) node).remove(AGGREGATIONS);
        } else {
          this.accept(agg);
        }
      }
    };

    // cleanup query. remove empty AGGREGATIONS element (if empty)
    emptyAggRemover.accept(query);

    // This must be set to true or else in 7.X and 6/7 mixed clusters
    // will return lower bounded count values instead of an accurate count.
    if (groupBy.isEmpty()
        && version.elasticVersionMajor() >= ElasticsearchVersion.ES6.elasticVersionMajor()) {
      query.put("track_total_hits", true);
    }

    ElasticsearchJson.Result res = transport.search(Collections.emptyMap()).apply(query);

    final List<Map<String, Object>> result = new ArrayList<>();
    if (res.aggregations() != null) {
      // collect values
      ElasticsearchJson.visitValueNodes(res.aggregations(), m -> {
        // using 'Collectors.toMap' will trigger Java 8 bug here
        Map<String, Object> newMap = new LinkedHashMap<>();
        for (String key: m.keySet()) {
          // rename "g_<field>" keys back to their original field names
          newMap.put(fieldMap.getOrDefault(key, key), m.get(key));
        }
        result.add(newMap);
      });
    } else {
      // probably no group by. add single result
      result.add(new LinkedHashMap<>());
    }

    // elastic exposes total number of documents matching a query in "/hits/total" path
    // this can be used for simple "select count(*) from table"
    final long total = res.searchHits().total().value();

    if (groupBy.isEmpty()) {
      // put totals automatically for count(*) expression(s), unless they contain group by
      for (String expr : countAll) {
        result.forEach(m -> m.put(expr, total));
      }
    }

    final Function1<ElasticsearchJson.SearchHit, Object> getter =
        ElasticsearchEnumerators.getter(fields, ImmutableMap.copyOf(mapping));

    // wrap the flattened rows as synthetic search hits so the same getter
    // used by the plain find() path can project them
    ElasticsearchJson.SearchHits hits =
        new ElasticsearchJson.SearchHits(res.searchHits().total(), result.stream()
            .map(r -> new ElasticsearchJson.SearchHit("_id", r, null))
            .collect(Collectors.toList()));

    return Linq4j.asEnumerable(hits.hits()).select(getter);
  }

  /**
   * {@inheritDoc}
   *
   * <p>The row type is a single {@code _MAP} column: a map from VARCHAR keys
   * to nullable ANY values, mirroring the schemaless Elastic document.
   */
  @Override public RelDataType getRowType(RelDataTypeFactory relDataTypeFactory) {
    final RelDataType mapType = relDataTypeFactory.createMapType(
        relDataTypeFactory.createSqlType(SqlTypeName.VARCHAR),
        relDataTypeFactory.createTypeWithNullability(
            relDataTypeFactory.createSqlType(SqlTypeName.ANY), true));
    return relDataTypeFactory.builder().add("_MAP", mapType).build();
  }

  @Override public String toString() {
    return "ElasticsearchTable{" + indexName + "}";
  }

  @Override public <T> Queryable<T> asQueryable(QueryProvider queryProvider,
      SchemaPlus schema, String tableName) {
    return new ElasticsearchQueryable<>(queryProvider, schema, this, tableName);
  }

  @Override public RelNode toRel(RelOptTable.ToRelContext context, RelOptTable relOptTable) {
    final RelOptCluster cluster = context.getCluster();
    return new ElasticsearchTableScan(cluster,
        cluster.traitSetOf(ElasticsearchRel.CONVENTION), relOptTable, this, null);
  }

  /**
   * Implementation of {@link Queryable} based on
   * a {@link ElasticsearchTable}.
   *
   * @param <T> element type
   */
  public static class ElasticsearchQueryable<T> extends AbstractTableQueryable<T> {
    ElasticsearchQueryable(QueryProvider queryProvider, SchemaPlus schema,
        ElasticsearchTable table, String tableName) {
      super(queryProvider, schema, table, tableName);
    }

    // NOTE(review): intentionally returns null — execution appears to be
    // routed through the code-generated find() below rather than this
    // enumerator; confirm against the calling convention before relying on it.
    @Override public Enumerator<T> enumerator() {
      return null;
    }

    private ElasticsearchTable getTable() {
      return (ElasticsearchTable) table;
    }

    /** Called via code-generation.
     * @param ops list of queries (as strings)
     * @param fields projection
     * @see ElasticsearchMethod#ELASTICSEARCH_QUERYABLE_FIND
     * @return result as enumerable
     */
    @SuppressWarnings("UnusedDeclaration")
    public Enumerable<Object> find(List<String> ops,
        List<Map.Entry<String, Class>> fields,
        List<Map.Entry<String, RelFieldCollation.Direction>> sort,
        List<String> groupBy,
        List<Map.Entry<String, String>> aggregations,
        Map<String, String> mappings,
        Long offset, Long fetch) {
      try {
        return getTable().find(ops, fields, sort, groupBy, aggregations, mappings, offset, fetch);
      } catch (IOException e) {
        // surface the checked IOException as unchecked (generated code cannot declare throws)
        throw new UncheckedIOException("Failed to query " + getTable().indexName, e);
      }
    }
  }
}
package org.semanticweb.elk.reasoner.saturation.rules.subsumers;

/*
 * #%L
 * ELK Reasoner
 * $Id:$
 * $HeadURL:$
 * %%
 * Copyright (C) 2011 - 2013 Department of Computer Science, University of Oxford
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedClassExpression;
import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObjectProperty;
import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedObjectSomeValuesFrom;
import org.semanticweb.elk.reasoner.indexing.hierarchy.IndexedPropertyChain;
import org.semanticweb.elk.reasoner.indexing.modifiable.ModifiableIndexedObjectSomeValuesFrom;
import org.semanticweb.elk.reasoner.indexing.modifiable.ModifiableOntologyIndex;
import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.BackwardLink;
import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.Propagation;
import org.semanticweb.elk.reasoner.saturation.conclusions.interfaces.Subsumer;
import org.semanticweb.elk.reasoner.saturation.context.Context;
import org.semanticweb.elk.reasoner.saturation.context.ContextPremises;
import org.semanticweb.elk.reasoner.saturation.context.SubContextPremises;
import org.semanticweb.elk.reasoner.saturation.properties.SaturatedPropertyChain;
import org.semanticweb.elk.reasoner.saturation.rules.ConclusionProducer;
import org.semanticweb.elk.reasoner.saturation.tracing.inferences.ReflexiveSubsumer;
import org.semanticweb.elk.reasoner.saturation.tracing.inferences.TracedPropagation;
import org.semanticweb.elk.util.collections.LazySetIntersection;
import org.semanticweb.elk.util.collections.LazySetUnion;
import org.semanticweb.elk.util.collections.chains.Chain;
import org.semanticweb.elk.util.collections.chains.Matcher;
import org.semanticweb.elk.util.collections.chains.ReferenceFactory;
import org.semanticweb.elk.util.collections.chains.SimpleTypeBasedMatcher;

/**
 * A {@link ChainableSubsumerRule} producing {@link Propagation} of a
 * {@link Subsumer} {@link IndexedObjectSomeValuesFrom} over
 * {@link BackwardLink}s when the {@link IndexedClassExpression} filler of this
 * {@link IndexedObjectSomeValuesFrom} provided it can be used with at least one
 * {@link BackwardLink} in this {@link Context}
 *
 * @author "Yevgeny Kazakov"
 */
public class PropagationFromExistentialFillerRule extends
		AbstractChainableSubsumerRule {

	// logger for events
	/*
	 * private static final Logger LOGGER_ = LoggerFactory
	 * .getLogger(PropagationFromExistentialFillerRule.class);
	 */

	public static final String NAME = "ObjectSomeValuesFrom Propagation Introduction";

	// the negative existentials whose filler this rule is registered for;
	// several existentials sharing the same filler accumulate in one rule
	// instance (see addTo/removeFrom)
	private final Collection<IndexedObjectSomeValuesFrom> negExistentials_;

	// chain constructor: creates an empty rule linked to the next one in
	// the rule chain (used by FACTORY_)
	private PropagationFromExistentialFillerRule(ChainableSubsumerRule next) {
		super(next);
		this.negExistentials_ = new ArrayList<IndexedObjectSomeValuesFrom>(1);
	}

	// stand-alone constructor: a detached rule holding a single existential,
	// used as an argument for addTo/removeFrom when (un)registering
	private PropagationFromExistentialFillerRule(
			IndexedObjectSomeValuesFrom negExistential) {
		super(null);
		this.negExistentials_ = new ArrayList<IndexedObjectSomeValuesFrom>(1);
		this.negExistentials_.add(negExistential);
	}

	// TODO: hide this method
	public Collection<IndexedObjectSomeValuesFrom> getNegativeExistentials() {
		return negExistentials_;
	}

	/**
	 * Registers this rule for the filler of the given existential in the
	 * given ontology index.
	 *
	 * @return {@code true} if the index was modified
	 */
	public static boolean addRuleFor(
			ModifiableIndexedObjectSomeValuesFrom existential,
			ModifiableOntologyIndex index) {
		return index.add(existential.getFiller(),
				new PropagationFromExistentialFillerRule(existential));
	}

	/**
	 * Removes the registration of this rule for the filler of the given
	 * existential from the given ontology index.
	 *
	 * @return {@code true} if the index was modified
	 */
	public static boolean removeRuleFor(
			ModifiableIndexedObjectSomeValuesFrom existential,
			ModifiableOntologyIndex index) {
		return index.remove(existential.getFiller(),
				new PropagationFromExistentialFillerRule(existential));
	}

	@Override
	public String getName() {
		return NAME;
	}

	/**
	 * For every registered existential, produces a {@link TracedPropagation}
	 * in the root context for each relevant sub-property of the
	 * existential's relation that is backed by an initialized sub-context,
	 * and a {@link ReflexiveSubsumer} when the relation is derived reflexive.
	 */
	@Override
	public void apply(IndexedClassExpression premise, ContextPremises premises,
			ConclusionProducer producer) {

		final Map<IndexedObjectProperty, ? extends SubContextPremises> subContextMap = premises
				.getSubContextPremisesByObjectProperty();

		// properties over which a propagation could fire: local reflexive
		// properties plus properties with existing sub-contexts (lazy union,
		// no copying)
		final Set<IndexedObjectProperty> candidatePropagationProperties = new LazySetUnion<IndexedObjectProperty>(
				premises.getLocalReflexiveObjectProperties(),
				subContextMap.keySet());

		// TODO: deal with reflexive roles using another
		// rule and uncomment this

		// if (candidatePropagationProperties.isEmpty()) {
		// return;
		// }

		for (IndexedObjectSomeValuesFrom e : negExistentials_) {
			IndexedObjectProperty relation = e.getProperty();
			/*
			 * creating propagations for relevant sub-properties of the
			 * relation
			 */
			SaturatedPropertyChain saturation = relation.getSaturated();
			for (IndexedObjectProperty property : new LazySetIntersection<IndexedObjectProperty>(
					candidatePropagationProperties,
					saturation.getSubProperties())) {
				// producer.produce(premises.getRoot(), new
				// Propagation(property, e));
				if (subContextMap.get(property).isInitialized()) {
					// propagation introduction is a binary rule where the
					// sub-context being initialized is a premise
					producer.produce(premises.getRoot(), new TracedPropagation(
							property, e));
				}
			}

			// TODO: create a composition rule to deal with reflexivity
			// propagating to the this context if relation is reflexive
			if (saturation.isDerivedReflexive()) {
				// producer.produce(premises.getRoot(), new
				// ComposedSubsumer(e));
				producer.produce(premises.getRoot(),
						new ReflexiveSubsumer<IndexedObjectSomeValuesFrom>(e));
			}
		}
	}

	/**
	 * Merges this rule's existentials into the matching rule in the given
	 * chain, creating that rule if absent.
	 *
	 * @return always {@code true} (adding cannot fail)
	 */
	@Override
	public boolean addTo(Chain<ChainableSubsumerRule> ruleChain) {
		if (isEmpty())
			return true;
		PropagationFromExistentialFillerRule rule = ruleChain.getCreate(
				MATCHER_, FACTORY_);
		rule.negExistentials_.addAll(this.negExistentials_);
		return true;
	}

	/**
	 * Removes this rule's existentials from the matching rule in the given
	 * chain. All-or-nothing: if any existential is missing, every removal
	 * performed so far is reverted and {@code false} is returned.
	 */
	@Override
	public boolean removeFrom(Chain<ChainableSubsumerRule> ruleChain) {
		if (isEmpty())
			return true;
		PropagationFromExistentialFillerRule rule = ruleChain.find(MATCHER_);

		if (rule == null)
			return false;
		// else
		boolean success = true;
		int removed = 0;
		// remove in order, counting successes so a partial removal can be
		// undone below
		for (IndexedObjectSomeValuesFrom negExistential : this.negExistentials_) {
			if (rule.negExistentials_.remove(negExistential)) {
				removed++;
			} else {
				success = false;
				break;
			}
		}

		if (success) {
			// drop the rule from the chain entirely once it holds nothing
			if (rule.isEmpty())
				ruleChain.remove(MATCHER_);
			return true;
		}
		// else revert all changes: re-add exactly the first `removed`
		// existentials that were taken out above
		for (IndexedObjectSomeValuesFrom negExistential : this.negExistentials_) {
			if (removed == 0)
				break;
			removed--;
			rule.negExistentials_.add(negExistential);
		}
		return false;
	}

	@Override
	public void accept(LinkedSubsumerRuleVisitor visitor,
			IndexedClassExpression premise, ContextPremises premises,
			ConclusionProducer producer) {
		visitor.visit(this, premise, premises, producer);
	}

	/**
	 * @return {@code true} if this rule never does anything
	 */
	private boolean isEmpty() {
		return negExistentials_.isEmpty();
	}

	/**
	 * Produces propagations of {@link IndexedObjectSomeValuesFrom} over the
	 * given {@link IndexedPropertyChain} in the given {@link Context}
	 *
	 * @param property
	 * @param premises
	 * @param producer
	 */
	void applyForProperty(IndexedObjectProperty property,
			ContextPremises premises, ConclusionProducer producer) {
		for (IndexedObjectSomeValuesFrom e : negExistentials_) {
			SaturatedPropertyChain saturation = e.getProperty().getSaturated();
			if (saturation.getSubPropertyChains().contains(property)) {
				// producer.produce(premises.getRoot(), new
				// Propagation(property, e));
				producer.produce(premises.getRoot(), new TracedPropagation(
						property, e));
			}
		}
	}

	/**
	 * Walks the given rule chain until the first rule of this type and, if
	 * found, applies it for the given property; at most one matching rule is
	 * applied.
	 */
	public static void applyForProperty(LinkedSubsumerRule rule,
			IndexedObjectProperty property, ContextPremises premises,
			ConclusionProducer producer) {
		for (;;) {
			if (rule == null)
				return;
			PropagationFromExistentialFillerRule matchedRule = MATCHER_
					.match(rule);
			if (matchedRule != null) {
				matchedRule.applyForProperty(property, premises, producer);
				return;
			}
			// else walk to the next rule in the chain
			rule = rule.next();
		}
	}

	// matches chain links that are instances of this rule class
	private static final Matcher<LinkedSubsumerRule, PropagationFromExistentialFillerRule> MATCHER_ = new SimpleTypeBasedMatcher<LinkedSubsumerRule, PropagationFromExistentialFillerRule>(
			PropagationFromExistentialFillerRule.class);

	// creates a fresh (empty) rule instance when getCreate finds no match
	private static final ReferenceFactory<ChainableSubsumerRule, PropagationFromExistentialFillerRule> FACTORY_ = new ReferenceFactory<ChainableSubsumerRule, PropagationFromExistentialFillerRule>() {
		@Override
		public PropagationFromExistentialFillerRule create(
				ChainableSubsumerRule next) {
			return new PropagationFromExistentialFillerRule(next);
		}
	};

}
/* * Copyright 2015 Kejun Xia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.shipdream.lib.poke; import com.shipdream.lib.poke.exception.CircularDependenciesException; import com.shipdream.lib.poke.exception.ProvideException; import com.shipdream.lib.poke.exception.ProviderConflictException; import com.shipdream.lib.poke.exception.ProviderMissingException; import com.shipdream.lib.poke.util.ReflectUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.lang.annotation.Annotation; import java.lang.annotation.Documented; import java.lang.annotation.Retention; import javax.inject.Named; import javax.inject.Qualifier; import javax.inject.Singleton; import static java.lang.annotation.RetentionPolicy.RUNTIME; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.verify; public class TestProviderFinderByRegistry extends BaseTestCases { private Graph graph; private ProviderFinderByRegistry providerFinder; @Before public void setUp() throws Exception { providerFinder = new ProviderFinderByRegistry(); graph = new Graph() { { addProviderFinders(providerFinder); } }; } @Qualifier @Documented @Retention(RUNTIME) @interface Google { } @Qualifier @Documented @Retention(RUNTIME) @interface Microsoft { } interface Os { } static class iOs implements Os { } @Google static class Android implements Os { } @Microsoft static class Windows implements Os { 
} @Test(expected = ProviderConflictException.class) public void shouldDetectConflictProviderException() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { providerFinder.register(Os.class, iOs.class); providerFinder.register(Os.class, Android.class); providerFinder.register(Os.class, Android.class); } @Test(expected = NullPointerException.class) public void should_detect_unregister_null_implementationClassName_error() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException, ClassNotFoundException { String impl = null; providerFinder.unregister(Os.class, impl); } @Test(expected = NullPointerException.class) public void should_detect_unregister_null_implementationClass_error() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException, ClassNotFoundException { Class impl = null; providerFinder.unregister(Os.class, impl); } private static class Container { @MyInject private Os ios; @Google @MyInject private Os android; @Microsoft @MyInject private Os windows; } @Test public void shouldInjectQualifiedWithDifferentInstances() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { providerFinder.register(Os.class, iOs.class); providerFinder.register(Os.class, Android.class); providerFinder.register(Os.class, Windows.class); Container container = new Container(); graph.inject(container, MyInject.class); Container container2 = new Container(); graph.inject(container2, MyInject.class); Assert.assertEquals(container.ios.getClass(), iOs.class); Assert.assertEquals(container.android.getClass(), Android.class); Assert.assertEquals(container.windows.getClass(), Windows.class); Assert.assertTrue(container.ios != container2.ios); Assert.assertTrue(container.android != container2.android); Assert.assertTrue(container.windows != container2.windows); } @Test public void 
shouldInjectQualifiedSingletonInstance() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { ScopeCache scopeCache = new ScopeCache(); providerFinder.register(Os.class, iOs.class, scopeCache); providerFinder.register(Os.class, Android.class, scopeCache); providerFinder.register(Os.class, Windows.class, scopeCache); Container container = new Container(); graph.inject(container, MyInject.class); Container container2 = new Container(); graph.inject(container2, MyInject.class); Assert.assertEquals(container.ios.getClass(), iOs.class); Assert.assertEquals(container.android.getClass(), Android.class); Assert.assertEquals(container.windows.getClass(), Windows.class); Assert.assertTrue(container.ios == container2.ios); Assert.assertTrue(container.android == container2.android); Assert.assertTrue(container.windows == container2.windows); } static class ContainerComponent extends Component { @Provides public Os providesOs() { return new iOs(); } //Mismatch os intentionally to test if provides qualifier overrides qualifier of impl class @Microsoft @Provides public Os providesOs1() { return new Android(); } //Mismatch os intentionally to test if provides qualifier overrides qualifier of impl class @Google @Provides public Os providesOs2() { return new Windows(); } } @Test public void componentProvidesQualifierShouldOverrideImplClassQualifier() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { providerFinder.register(new ContainerComponent()); Container container = new Container(); graph.inject(container, MyInject.class); Container container2 = new Container(); graph.inject(container2, MyInject.class); Assert.assertEquals(container.ios.getClass(), iOs.class); Assert.assertEquals(container.android.getClass(), Windows.class); Assert.assertEquals(container.windows.getClass(), Android.class); Assert.assertTrue(container.ios != container2.ios); 
Assert.assertTrue(container.android != container2.android); Assert.assertTrue(container.windows != container2.windows); } interface Book { } @Named("A") static class BookA implements Book { } @Named("B") static class BookB implements Book { } @Test public void namedQualifierShouldBeRecognized() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { class Library { @MyInject @Named("A") private Book b1; @MyInject @Named("B") private Book b2; } providerFinder.register(Book.class, BookA.class); providerFinder.register(Book.class, BookB.class); Library library = new Library(); graph.inject(library, MyInject.class); Assert.assertEquals(library.b1.getClass(), BookA.class); Assert.assertEquals(library.b2.getClass(), BookB.class); } @Test public void incorrectNamedQualifierShouldBeRecognized() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { class Library { @MyInject @Named("B") private Book b1; } providerFinder.register(Book.class, BookA.class); providerFinder.register(Book.class, BookB.class); Library library = new Library(); graph.inject(library, MyInject.class); Assert.assertFalse(library.b1.getClass() == BookA.class); } @Test(expected = ProviderMissingException.class) public void badNamedQualifierShouldBeTreatedAsMissing() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { class Library { @MyInject @Named("C") private Book b1; } providerFinder.register(Book.class, BookA.class); providerFinder.register(Book.class, BookB.class); Library library = new Library(); graph.inject(library, MyInject.class); Assert.assertEquals(library.b1.getClass(), BookA.class); } @Test(expected = ProviderMissingException.class) public void badEmptyNamedQualifierShouldBeTreatedAsMissing() throws ProviderConflictException, ProvideException, CircularDependenciesException, ProviderMissingException { class Library { //Empty 
// (tail of previous test, continued from earlier chunk) ...named qualifier is allowed but will be different with any non empty string
        //Named qualifier
        @MyInject
        @Named
        private Book b1;
    }

    providerFinder.register(Book.class, BookA.class);
    providerFinder.register(Book.class, BookB.class);

    Library library = new Library();
    graph.inject(library, MyInject.class);
    // @Named with no value resolves against the registration carrying no named value
    Assert.assertEquals(library.b1.getClass(), BookA.class);
}

interface Food {
}

// Rice carries an empty @Named qualifier; Wheat is unqualified.
@Named
static class Rice implements Food {
}

static class Wheat implements Food {
}

/**
 * An empty {@code @Named} qualifier on the implementation class must be matched
 * against an empty {@code @Named} injection point, distinct from the unqualified one.
 */
@Test
public void emptyNamedQualifierShouldBeTreatedAsNormalQualifier()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException {
    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    providerFinder.register(Food.class, Rice.class);
    providerFinder.register(Food.class, Wheat.class);

    Basket basket = new Basket();
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);
}

// Qualified/unqualified pairs used by the overriding-registration tests below.
@Named
static class Noodle implements Food {
}

static class Bread implements Food {
}

@Named
static class Chicken implements Food {
}

static class Beef implements Food {
}

/**
 * Registering by implementation class NAME: a second registration for the same
 * (type, qualifier) pair must conflict unless the overriding flag is passed;
 * unregistering overriding bindings falls back to the originals, and
 * unregistering everything makes injection raise ProviderMissingException.
 */
@Test
public void overridingClassNameRegisteringShouldWorkAsExpected()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException, ClassNotFoundException {
    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    Basket basket = new Basket();
    ScopeCache scopeCache = new ScopeCache();

    // Baseline bindings: Rice (named, scoped) and Wheat (unqualified).
    providerFinder.register(Food.class, Rice.class.getName(), scopeCache);
    providerFinder.register(Food.class, Wheat.class.getName());
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // Re-registering the same (type, qualifier) without the override flag must conflict.
    boolean conflicted = false;
    try {
        providerFinder.register(Food.class, Noodle.class.getName());
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    conflicted = false;
    try {
        providerFinder.register(Food.class, Bread.class.getName());
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    // With the override flag the new bindings take effect.
    providerFinder.register(Food.class, Noodle.class.getName(), null, true);
    providerFinder.register(Food.class, Bread.class.getName(), null, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Noodle.class);
    Assert.assertEquals(basket.w.getClass(), Bread.class);

    // Overriding again replaces the previous overrides.
    providerFinder.register(Food.class, Chicken.class.getName(), null, true);
    providerFinder.register(Food.class, Beef.class.getName(), null, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Chicken.class);
    Assert.assertEquals(basket.w.getClass(), Beef.class);

    // Removing the overriding bindings restores the original Rice/Wheat bindings.
    providerFinder.unregister(Food.class, Chicken.class.getName());
    providerFinder.unregister(Food.class, Bread.class.getName());
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // NOTE(review): this second unregister pass names Chicken/Bread again (not
    // Rice/Wheat); it nevertheless clears the remaining bindings below — confirm
    // this matches the intended unregister-by-qualifier semantics.
    providerFinder.unregister(Food.class, Chicken.class.getName());
    providerFinder.unregister(Food.class, Bread.class.getName());
    basket = new Basket();
    boolean shouldCatchProviderMissingException = false;
    try {
        graph.inject(basket, MyInject.class);
    } catch (ProviderMissingException e) {
        shouldCatchProviderMissingException = true;
    }
    Assert.assertTrue(shouldCatchProviderMissingException);
}

// A component whose @Provides method illegally returns void.
static class BadComponent extends Component{
    @Provides
    void provideNothing() {
        return;
    }
}

/** A void {@code @Provides} method must be rejected at registration time. */
@Test
public void should_throw_exception_when_there_is_void_function_in_component()
        throws ProvideException, ProviderConflictException {
    BadComponent badComponent = new BadComponent();
    try {
        providerFinder.register(badComponent);
    } catch (ProvideException e) {
        Assert.assertTrue(e.getMessage().contains("must not return void"));
        return;
    }
    Assert.fail("Should raise ProvideException for provider returning void");
}

@Qualifier
@Retention(RUNTIME)
@interface Qualifier1 {}

@Qualifier
@Retention(RUNTIME)
@interface Qualifier2 {}

// A component whose @Provides method illegally carries two qualifiers.
static class DuplicateComponent extends Component{
    @Provides
    @Qualifier1
    @Qualifier2
    String provideText() {
        return "123";
    }
}

/** At most one qualifier is allowed per {@code @Provides} method. */
@Test
public void should_throw_exception_when_provider_has_more_than_one_qualifier()
        throws ProvideException, ProviderConflictException {
    DuplicateComponent duplicateComponent = new DuplicateComponent();
    try {
        providerFinder.register(duplicateComponent);
    } catch (ProvideException e) {
        Assert.assertTrue(e.getMessage().contains("Only one Qualifier"));
        return;
    }
    Assert.fail("Should raise ProvideException for provider with multiple qualifier");
}

/** A provider returning null from createInstance must fail injection with ProvideException. */
@Test
public void should_throw_exception_if_provider_returns_null()
        throws ProvideException, ProviderConflictException, CircularDependenciesException,
        ProviderMissingException {
    Provider<String> provider = new Provider(String.class) {
        @Override
        protected String createInstance() throws ProvideException {
            // Deliberately broken provider.
            return null;
        }
    };
    provider.setScopeCache(new ScopeCache());
    providerFinder.register(provider);

    class Container {
        @MyInject
        @Singleton
        private String name;
    }

    Container container = new Container();
    try {
        graph.inject(container, MyInject.class);
    } catch (ProvideException e) {
        Assert.assertTrue(e.getMessage().contains("should not provide NULL as instance"));
        return;
    }
    Assert.fail("Should raise ProvideException for provider returns null");
}

/**
 * Same override/unregister life cycle as the class-name variant above, but
 * registering by implementation Class object instead of class name.
 */
@Test
public void overridingClassRegisteringShouldWorkAsExpected()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException {
    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    Basket basket = new Basket();
    providerFinder.register(Food.class, Rice.class);
    providerFinder.register(Food.class, Wheat.class);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // Duplicate (type, qualifier) registrations conflict without the override flag.
    boolean conflicted = false;
    try {
        providerFinder.register(Food.class, Noodle.class);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    conflicted = false;
    try {
        providerFinder.register(Food.class, Bread.class);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    // Override once...
    providerFinder.register(Food.class, Noodle.class, null, true);
    providerFinder.register(Food.class, Bread.class, null, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Noodle.class);
    Assert.assertEquals(basket.w.getClass(), Bread.class);

    // ...and override again.
    providerFinder.register(Food.class, Chicken.class, null, true);
    providerFinder.register(Food.class, Beef.class, null, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Chicken.class);
    Assert.assertEquals(basket.w.getClass(), Beef.class);

    // Unregistering overriding bindings restores the originals.
    providerFinder.unregister(Food.class, Noodle.class);
    providerFinder.unregister(Food.class, Bread.class);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // Removing the remaining bindings leaves nothing to inject.
    providerFinder.unregister(Food.class, Chicken.class);
    providerFinder.unregister(Food.class, Bread.class);
    basket = new Basket();
    boolean shouldCatchProviderMissingException = false;
    try {
        graph.inject(basket, MyInject.class);
    } catch (ProviderMissingException e) {
        shouldCatchProviderMissingException = true;
    }
    Assert.assertTrue(shouldCatchProviderMissingException);
}

/**
 * Same override/unregister life cycle again, registering explicit Provider
 * instances whose qualifiers are taken from the sample food classes.
 */
@Test
public void overridingProviderRegisteringShouldWorkAsExpected()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException {
    Provider<Food> providerRice = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Rice();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Rice.class);
        }
    };
    Provider<Food> providerWheat = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Wheat();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Wheat.class);
        }
    };
    Provider<Food> providerNoodle = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Noodle();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Noodle.class);
        }
    };
    Provider<Food> providerBread = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Bread();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Bread.class);
        }
    };
    Provider<Food> providerChicken = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Chicken();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Chicken.class);
        }
    };
    Provider<Food> providerBeef = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return new Beef();
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Beef.class);
        }
    };

    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    Basket basket = new Basket();
    providerFinder.register(providerRice);
    providerFinder.register(providerWheat);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // Conflicts without the override flag.
    boolean conflicted = false;
    try {
        providerFinder.register(providerNoodle);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    conflicted = false;
    try {
        providerFinder.register(providerBread);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    // Override once, then again.
    providerFinder.register(providerNoodle, true);
    providerFinder.register(providerBread, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Noodle.class);
    Assert.assertEquals(basket.w.getClass(), Bread.class);

    providerFinder.register(providerChicken, true);
    providerFinder.register(providerBeef, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Chicken.class);
    Assert.assertEquals(basket.w.getClass(), Beef.class);

    // Removing the latest overrides restores the original providers.
    providerFinder.unregister(providerChicken);
    providerFinder.unregister(providerBeef);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Rice.class);
    Assert.assertEquals(basket.w.getClass(), Wheat.class);

    // NOTE(review): unregisters providerNoodle/providerWheat (not providerRice) —
    // confirm this mixed pair is the intended way to empty both bindings.
    providerFinder.unregister(providerNoodle);
    providerFinder.unregister(providerWheat);
    basket = new Basket();
    boolean shouldCatchProviderMissingException = false;
    try {
        graph.inject(basket, MyInject.class);
    } catch (ProviderMissingException e) {
        shouldCatchProviderMissingException = true;
    }
    Assert.assertTrue(shouldCatchProviderMissingException);
}

static class Apple implements Food{}

static class Orange implements Food{}

static class Banana implements Food{}

// Components providing different (named, unnamed) combinations of Food.
static class FoodCompA extends Component {
    @Provides
    @Named
    public Food provideApple() {
        return new Apple();
    }

    @Provides
    public Food provideOrange() {
        return new Orange();
    }
}

static class FoodCompB extends Component {
    @Provides
    public Food provideApple() {
        return new Apple();
    }

    @Provides
    @Named
    public Food provideOrange() {
        return new Orange();
    }
}

static class FoodCompC extends Component {
    @Provides
    @Named
    public Food provideApple() {
        return new Apple();
    }

    @Provides
    public Food provideBanana() {
        return new Banana();
    }
}

/**
 * Same override/unregister life cycle, this time registering whole Components;
 * each component contributes both the named and the unnamed Food binding.
 */
@Test
public void overridingComponentRegisteringShouldWorkAsExpected()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException {
    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    Basket basket = new Basket();
    FoodCompA foodCompA = new FoodCompA();
    FoodCompB foodCompB = new FoodCompB();
    FoodCompC foodCompC = new FoodCompC();

    providerFinder.register(foodCompA);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Apple.class);
    Assert.assertEquals(basket.w.getClass(), Orange.class);

    // A second component with overlapping bindings conflicts without the flag.
    boolean conflicted = false;
    try {
        providerFinder.register(new FoodCompB());
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    // Override with B (swaps which fruit is named), then with C.
    providerFinder.register(foodCompB, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Orange.class);
    Assert.assertEquals(basket.w.getClass(), Apple.class);

    providerFinder.register(foodCompC, true);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Apple.class);
    Assert.assertEquals(basket.w.getClass(), Banana.class);

    // NOTE(review): unregistering foodCompA while B/C overrides are stacked
    // leaves A's bindings active — confirm intended component-stack semantics.
    providerFinder.unregister(foodCompA);
    graph.inject(basket, MyInject.class);
    Assert.assertEquals(basket.r.getClass(), Apple.class);
    Assert.assertEquals(basket.w.getClass(), Orange.class);

    providerFinder.unregister(foodCompA);
    basket = new Basket();
    boolean shouldCatchProviderMissingException = false;
    try {
        graph.inject(basket, MyInject.class);
    } catch (ProviderMissingException e) {
        shouldCatchProviderMissingException = true;
    }
    Assert.assertTrue(shouldCatchProviderMissingException);
}

/**
 * Scope caches must track the binding life cycle: overriding a binding caches
 * into the overriding provider's cache, and unregistering a binding evicts its
 * cached instance.
 */
@Test
public void scopeCacheShouldRemoveUnregisteredBindings()
        throws ProviderConflictException, ProvideException, CircularDependenciesException,
        ProviderMissingException {
    class Basket {
        @MyInject
        @Named
        private Food r;

        @MyInject
        private Food w;
    }

    final Banana banana = new Banana();
    final Orange orange = new Orange();
    ScopeCache scopeCache = new ScopeCache();
    ScopeCache scopeCacheOverridden = new ScopeCache();

    // Named (qualifier borrowed from Noodle's @Named) provider yielding banana.
    Provider<Food> namedProviderBanana = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return banana;
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Noodle.class);
        }
    };
    namedProviderBanana.setScopeCache(scopeCache);

    Provider<Food> unnammedProviderOrange = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return orange;
        }

        @Override
        public Annotation getQualifier() {
            return null;
        }
    };
    unnammedProviderOrange.setScopeCache(scopeCache);

    // Overriding pair caches into a separate ScopeCache.
    Provider<Food> namedProviderOrangeOverridden = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return orange;
        }

        @Override
        public Annotation getQualifier() {
            return ReflectUtils.findFirstQualifier(Chicken.class);
        }
    };
    namedProviderOrangeOverridden.setScopeCache(scopeCacheOverridden);

    Provider<Food> unnamedProviderBananaOverridden = new Provider<Food>(Food.class) {
        @Override
        protected Food createInstance() throws ProvideException {
            return banana;
        }

        @Override
        public Annotation getQualifier() {
            return null;
        }
    };
    unnamedProviderBananaOverridden.setScopeCache(scopeCacheOverridden);

    Basket basket = new Basket();
    providerFinder.register(namedProviderBanana);
    providerFinder.register(unnammedProviderOrange);
    graph.inject(basket, MyInject.class);
    Assert.assertTrue(basket.r == banana);
    Assert.assertTrue(basket.w == orange);
    // Instances are cached in the original cache only.
    Assert.assertTrue(findCacheInstance(scopeCache, namedProviderBanana) == banana);
    Assert.assertTrue(findCacheInstance(scopeCache, unnammedProviderOrange) == orange);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, namedProviderBanana) == null);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, unnammedProviderOrange) == null);

    // Conflicts without the override flag.
    boolean conflicted = false;
    try {
        providerFinder.register(namedProviderOrangeOverridden);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    conflicted = false;
    try {
        providerFinder.register(unnamedProviderBananaOverridden);
    } catch (ProviderConflictException e) {
        conflicted = true;
    }
    Assert.assertTrue(conflicted);

    // After overriding, injection swaps the instances and the overriding cache fills.
    providerFinder.register(namedProviderOrangeOverridden, true);
    providerFinder.register(unnamedProviderBananaOverridden, true);
    graph.inject(basket, MyInject.class);
    Assert.assertTrue(basket.r == orange);
    Assert.assertTrue(basket.w == banana);
    Assert.assertTrue(findCacheInstance(scopeCache, namedProviderBanana) == banana);
    Assert.assertTrue(findCacheInstance(scopeCache, unnammedProviderOrange) == orange);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, namedProviderBanana) == orange);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, unnammedProviderOrange) == banana);

    // Unregistering the original providers drops the overrides back to the originals.
    providerFinder.unregister(namedProviderBanana);
    providerFinder.unregister(unnammedProviderOrange);
    graph.inject(basket, MyInject.class);
    Assert.assertTrue(basket.r == banana);
    Assert.assertTrue(basket.w == orange);
    Assert.assertTrue(findCacheInstance(scopeCache, namedProviderBanana) == banana);
    Assert.assertTrue(findCacheInstance(scopeCache, unnamedProviderBananaOverridden) == orange);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, namedProviderBanana) == null);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, unnammedProviderOrange) == null);

    // Fully unregistering leaves nothing to inject and both caches empty.
    providerFinder.unregister(namedProviderOrangeOverridden);
    providerFinder.unregister(unnammedProviderOrange);
    basket = new Basket();
    boolean shouldCatchProviderMissingException = false;
    try {
        graph.inject(basket, MyInject.class);
    } catch (ProviderMissingException e) {
        shouldCatchProviderMissingException = true;
    }
    Assert.assertTrue(shouldCatchProviderMissingException);
    Assert.assertTrue(findCacheInstance(scopeCache, namedProviderBanana) == null);
    Assert.assertTrue(findCacheInstance(scopeCache, unnamedProviderBananaOverridden) == null);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, namedProviderBanana) == null);
    Assert.assertTrue(findCacheInstance(scopeCacheOverridden, unnammedProviderOrange) == null);
}

/**
 * Looks up the cached instance for the provider's (type, qualifier) key in the
 * given scope cache, or null when nothing is cached.
 */
@SuppressWarnings("unchecked")
private <T> T findCacheInstance(ScopeCache scopeCache, Provider<T> provider) {
    ScopeCache.CachedItem<T> cachedItem = scopeCache.findCacheItem(provider.type(), provider.getQualifier());
    if(cachedItem != null) {
        return cachedItem.instance;
    }
    return null;
}

interface Contract {
}

class Execution implements Contract {
}

/**
 * SimpleGraph.register(...) overloads must delegate verbatim to the underlying
 * ProviderFinderByRegistry (verified with a mock for every overload).
 */
@Test
public void should_be_able_to_register_implementation_into_simple_graph()
        throws ClassNotFoundException, ProviderConflictException, ProvideException {
    ProviderFinderByRegistry registry = mock(ProviderFinderByRegistry.class);
    SimpleGraph graph = new SimpleGraph(registry);

    //Register by name
    reset(registry);
    graph.register(String.class, "Impl1");
    verify(registry).register(eq(String.class), eq("Impl1"));

    reset(registry);
    ScopeCache cache = mock(ScopeCache.class);
    graph.register(String.class, "Impl2", cache);
    verify(registry).register(eq(String.class), eq("Impl2"), eq(cache));

    reset(registry);
    graph.register(String.class, "Impl3", cache, true);
    verify(registry).register(eq(String.class), eq("Impl3"), eq(cache), eq(true));

    reset(registry);
    graph.register(String.class, "Impl4", cache, false);
    verify(registry).register(eq(String.class), eq("Impl4"), eq(cache), eq(false));

    //Register by class
    reset(registry);
    graph.register(Contract.class, Execution.class);
    verify(registry).register(eq(Contract.class), eq(Execution.class));

    reset(registry);
    graph.register(Contract.class, Execution.class, cache);
    verify(registry).register(eq(Contract.class), eq(Execution.class), eq(cache));

    reset(registry);
    graph.register(Contract.class, Execution.class, cache, true);
    verify(registry).register(eq(Contract.class), eq(Execution.class), eq(cache), eq(true));

    reset(registry);
    graph.register(Contract.class, Execution.class, cache, false);
    verify(registry).register(eq(Contract.class), eq(Execution.class), eq(cache), eq(false));

    //Register by component
    Component component = mock(Component.class);
    reset(registry);
    graph.register(component);
    verify(registry).register(eq(component));

    reset(registry);
    graph.register(component, true);
    verify(registry).register(eq(component), eq(true));

    reset(registry);
    graph.register(component, false);
    verify(registry).register(eq(component), eq(false));

    //Register by provider
    Provider provider = mock(Provider.class);
    reset(registry);
    graph.register(provider);
    verify(registry).register(eq(provider));

    reset(registry);
    graph.register(provider, true);
    verify(registry).register(eq(provider), eq(true));

    reset(registry);
    graph.register(provider, false);
    verify(registry).register(eq(provider), eq(false));
}

/** SimpleGraph.unregister(...) overloads must delegate verbatim as well. */
@Test
public void should_be_able_to_unregister_implementation_into_simple_graph()
        throws ClassNotFoundException, ProviderConflictException, ProvideException {
    ProviderFinderByRegistry registry = mock(ProviderFinderByRegistry.class);
    SimpleGraph graph = new SimpleGraph(registry);

    reset(registry);
    graph.unregister(String.class, "Impl1");
    verify(registry).unregister(eq(String.class), eq("Impl1"));

    reset(registry);
    graph.unregister(Contract.class, Execution.class);
    verify(registry).unregister(eq(Contract.class), eq(Execution.class));

    Component component = mock(Component.class);
    reset(registry);
    graph.unregister(component);
    verify(registry).unregister(eq(component));

    Provider provider = mock(Provider.class);
    reset(registry);
    graph.unregister(provider);
    verify(registry).unregister(eq(provider));
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.VersionUtil;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.api.ResourceTracker;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeHeartbeatResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.UnRegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NodeLabelsUtils;
import org.apache.hadoop.yarn.server.resourcemanager.resource.DynamicResourceConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerFinishedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeReconnectEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStartedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStatusEvent;
import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils;
import org.apache.hadoop.yarn.util.RackResolver;
import org.apache.hadoop.yarn.util.YarnVersionInfo;

import com.google.common.annotations.VisibleForTesting;

/**
 * RPC service the ResourceManager exposes to NodeManagers. Handles NM
 * registration and periodic heartbeats, validates nodes (version, allow-list,
 * minimum allocation), and forwards the resulting node events to the RM
 * dispatcher.
 */
public class ResourceTrackerService extends AbstractService implements
    ResourceTracker {

  private static final Log LOG = LogFactory.getLog(ResourceTrackerService.class);

  private static final RecordFactory recordFactory =
      RecordFactoryProvider.getRecordFactory(null);

  private final RMContext rmContext;
  private final NodesListManager nodesListManager;
  private final NMLivelinessMonitor nmLivelinessMonitor;
  private final RMContainerTokenSecretManager containerTokenSecretManager;
  private final NMTokenSecretManagerInRM nmTokenSecretManager;

  // Interval handed back to NMs in each heartbeat response.
  private long nextHeartBeatInterval;
  private Server server;
  private InetSocketAddress resourceTrackerAddress;
  private String minimumNodeManagerVersion;

  // Minimum allocation a registering node must be able to satisfy.
  private int minAllocMb;
  private int minAllocVcores;

  // Which node-label configuration mode is active (see serviceInit).
  private boolean isDistributedNodeLabelsConf;
  private boolean isDelegatedCentralizedNodeLabelsConf;

  // Per-node resource overrides from dynamic-resources.xml; volatile because
  // it can be swapped at runtime via updateDynamicResourceConfiguration().
  private volatile DynamicResourceConfiguration drConf;

  public ResourceTrackerService(RMContext rmContext,
      NodesListManager nodesListManager,
      NMLivelinessMonitor nmLivelinessMonitor,
      RMContainerTokenSecretManager containerTokenSecretManager,
      NMTokenSecretManagerInRM nmTokenSecretManager) {
    super(ResourceTrackerService.class.getName());
    this.rmContext = rmContext;
    this.nodesListManager = nodesListManager;
    this.nmLivelinessMonitor = nmLivelinessMonitor;
    this.containerTokenSecretManager = containerTokenSecretManager;
    this.nmTokenSecretManager = nmTokenSecretManager;
  }

  @Override
  protected void serviceInit(Configuration conf) throws Exception {
    resourceTrackerAddress = conf.getSocketAddr(
        YarnConfiguration.RM_BIND_HOST,
        YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
        YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
        YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_PORT);

    RackResolver.init(conf);
    nextHeartBeatInterval =
        conf.getLong(YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS,
            YarnConfiguration.DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS);
    if (nextHeartBeatInterval <= 0) {
      throw new YarnRuntimeException("Invalid Configuration. "
          + YarnConfiguration.RM_NM_HEARTBEAT_INTERVAL_MS
          + " should be larger than 0.");
    }

    minAllocMb = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
    minAllocVcores = conf.getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);

    minimumNodeManagerVersion = conf.get(
        YarnConfiguration.RM_NODEMANAGER_MINIMUM_VERSION,
        YarnConfiguration.DEFAULT_RM_NODEMANAGER_MINIMUM_VERSION);

    if (YarnConfiguration.areNodeLabelsEnabled(conf)) {
      isDistributedNodeLabelsConf =
          YarnConfiguration.isDistributedNodeLabelConfiguration(conf);
      isDelegatedCentralizedNodeLabelsConf =
          YarnConfiguration.isDelegatedCentralizedNodeLabelConfiguration(conf);
    }

    loadDynamicResourceConfiguration(conf);

    super.serviceInit(conf);
  }

  /**
   * Load DynamicResourceConfiguration from dynamic-resources.xml.
   * @param conf
   * @throws IOException
   */
  public void loadDynamicResourceConfiguration(Configuration conf)
      throws IOException {
    try {
      // load dynamic-resources.xml
      InputStream drInputStream = this.rmContext.getConfigurationProvider()
          .getConfigurationInputStream(conf,
              YarnConfiguration.DR_CONFIGURATION_FILE);
      if (drInputStream != null) {
        this.drConf = new DynamicResourceConfiguration(conf, drInputStream);
      } else {
        // No dynamic-resources.xml available: fall back to an empty overlay.
        this.drConf = new DynamicResourceConfiguration(conf);
      }
    } catch (Exception e) {
      throw new IOException(e);
    }
  }

  /**
   * Update DynamicResourceConfiguration with new configuration.
   * @param conf
   */
  public void updateDynamicResourceConfiguration(
      DynamicResourceConfiguration conf) {
    this.drConf = conf;
  }

  @Override
  protected void serviceStart() throws Exception {
    super.serviceStart();
    // ResourceTrackerServer authenticates NodeManager via Kerberos if
    // security is enabled, so no secretManager.
    Configuration conf = getConfig();
    YarnRPC rpc = YarnRPC.create(conf);
    this.server = rpc.getServer(
        ResourceTracker.class, this, resourceTrackerAddress, conf, null,
        conf.getInt(YarnConfiguration.RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT,
            YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_CLIENT_THREAD_COUNT));

    // Enable service authorization?
    if (conf.getBoolean(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
        false)) {
      InputStream inputStream = this.rmContext.getConfigurationProvider()
          .getConfigurationInputStream(conf,
              YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
      if (inputStream != null) {
        conf.addResource(inputStream);
      }
      refreshServiceAcls(conf, RMPolicyProvider.getInstance());
    }

    this.server.start();
    // Publish the actual bound address (port may have been ephemeral).
    conf.updateConnectAddr(YarnConfiguration.RM_BIND_HOST,
        YarnConfiguration.RM_RESOURCE_TRACKER_ADDRESS,
        YarnConfiguration.DEFAULT_RM_RESOURCE_TRACKER_ADDRESS,
        server.getListenerAddress());
  }

  @Override
  protected void serviceStop() throws Exception {
    if (this.server != null) {
      this.server.stop();
    }
    super.serviceStop();
  }

  /**
   * Helper method to handle received ContainerStatus. If this corresponds to
   * the completion of a master-container of a managed AM,
   * we call the handler for RMAppAttemptContainerFinishedEvent.
   */
  @SuppressWarnings("unchecked")
  @VisibleForTesting
  void handleNMContainerStatus(NMContainerStatus containerStatus, NodeId nodeId) {
    ApplicationAttemptId appAttemptId =
        containerStatus.getContainerId().getApplicationAttemptId();
    RMApp rmApp =
        rmContext.getRMApps().get(appAttemptId.getApplicationId());
    if (rmApp == null) {
      LOG.error("Received finished container : "
          + containerStatus.getContainerId()
          + " for unknown application " + appAttemptId.getApplicationId()
          + " Skipping.");
      return;
    }

    if (rmApp.getApplicationSubmissionContext().getUnmanagedAM()) {
      // Unmanaged AMs have no RM-tracked master container.
      if (LOG.isDebugEnabled()) {
        LOG.debug("Ignoring container completion status for unmanaged AM "
            + rmApp.getApplicationId());
      }
      return;
    }

    RMAppAttempt rmAppAttempt = rmApp.getRMAppAttempt(appAttemptId);
    if (rmAppAttempt == null) {
      LOG.info("Ignoring not found attempt " + appAttemptId);
      return;
    }

    Container masterContainer = rmAppAttempt.getMasterContainer();
    if (masterContainer.getId().equals(containerStatus.getContainerId())
        && containerStatus.getContainerState() == ContainerState.COMPLETE) {
      ContainerStatus status =
          ContainerStatus.newInstance(containerStatus.getContainerId(),
              containerStatus.getContainerState(),
              containerStatus.getDiagnostics(),
              containerStatus.getContainerExitStatus());
      // sending master container finished event.
      RMAppAttemptContainerFinishedEvent evt =
          new RMAppAttemptContainerFinishedEvent(appAttemptId, status,
              nodeId);
      rmContext.getDispatcher().getEventHandler().handle(evt);
    }
  }

  @SuppressWarnings("unchecked")
  @Override
  public RegisterNodeManagerResponse registerNodeManager(
      RegisterNodeManagerRequest request) throws YarnException,
      IOException {
    NodeId nodeId = request.getNodeId();
    String host = nodeId.getHost();
    int cmPort = nodeId.getPort();
    int httpPort = request.getHttpPort();
    Resource capability = request.getResource();
    String nodeManagerVersion = request.getNMVersion();

    RegisterNodeManagerResponse response = recordFactory
        .newRecordInstance(RegisterNodeManagerResponse.class);

    // Reject NMs older than the configured minimum version. "EqualToRM"
    // resolves to this RM's own version at registration time.
    if (!minimumNodeManagerVersion.equals("NONE")) {
      if (minimumNodeManagerVersion.equals("EqualToRM")) {
        minimumNodeManagerVersion = YarnVersionInfo.getVersion();
      }

      if ((nodeManagerVersion == null) ||
          (VersionUtil.compareVersions(nodeManagerVersion,minimumNodeManagerVersion)) < 0) {
        String message =
            "Disallowed NodeManager Version " + nodeManagerVersion
                + ", is less than the minimum version "
                + minimumNodeManagerVersion + " sending SHUTDOWN signal to "
                + "NodeManager.";
        LOG.info(message);
        response.setDiagnosticsMessage(message);
        response.setNodeAction(NodeAction.SHUTDOWN);
        return response;
      }
    }

    // Check if this node is a 'valid' node
    if (!this.nodesListManager.isValidNode(host)) {
      String message =
          "Disallowed NodeManager from  " + host
              + ", Sending SHUTDOWN signal to the NodeManager.";
      LOG.info(message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }

    // check if node's capacity is load from dynamic-resources.xml
    String[] nodes = this.drConf.getNodes();
    String nid = nodeId.toString();

    if (nodes != null && Arrays.asList(nodes).contains(nid)) {
      capability.setMemory(this.drConf.getMemoryPerNode(nid));
      capability.setVirtualCores(this.drConf.getVcoresPerNode(nid));
      if (LOG.isDebugEnabled()) {
        LOG.debug("Resource for node: " + nid + " is adjusted to "
            + capability + " due to settings in dynamic-resources.xml.");
      }
    }

    // Check if this node has minimum allocations
    if (capability.getMemory() < minAllocMb
        || capability.getVirtualCores() < minAllocVcores) {
      String message =
          "NodeManager from  " + host
              + " doesn't satisfy minimum allocations, Sending SHUTDOWN"
              + " signal to the NodeManager.";
      LOG.info(message);
      response.setDiagnosticsMessage(message);
      response.setNodeAction(NodeAction.SHUTDOWN);
      return response;
    }

    response.setContainerTokenMasterKey(containerTokenSecretManager
        .getCurrentKey());
    response.setNMTokenMasterKey(nmTokenSecretManager
        .getCurrentKey());

    RMNode rmNode = new RMNodeImpl(nodeId, rmContext, host, cmPort, httpPort,
        resolve(host), capability, nodeManagerVersion);

    // putIfAbsent distinguishes a brand-new node from a reconnect of a
    // previously known nodeId.
    RMNode oldNode = this.rmContext.getRMNodes().putIfAbsent(nodeId, rmNode);
    if (oldNode == null) {
      this.rmContext.getDispatcher().getEventHandler().handle(
          new RMNodeStartedEvent(nodeId, request.getNMContainerStatuses(),
              request.getRunningApplications()));
    } else {
      LOG.info("Reconnect from the node at: " + host);
      this.nmLivelinessMonitor.unregister(nodeId);
      // Reset heartbeat ID since node just restarted.
      oldNode.resetLastNodeHeartBeatResponse();
      this.rmContext
          .getDispatcher()
          .getEventHandler()
          .handle(
              new RMNodeReconnectEvent(nodeId, rmNode, request
                  .getRunningApplications(), request.getNMContainerStatuses()));
    }
    // On every node manager register we will be clearing NMToken keys if
    // present for any running application.
    this.nmTokenSecretManager.removeNodeKey(nodeId);
    this.nmLivelinessMonitor.register(nodeId);
    
    // Handle received container status, this should be processed after new
    // RMNode inserted
    if (!rmContext.isWorkPreservingRecoveryEnabled()) {
      if (!request.getNMContainerStatuses().isEmpty()) {
        LOG.info("received container statuses on node manager register :"
            + request.getNMContainerStatuses());
        for (NMContainerStatus status : request.getNMContainerStatuses()) {
          handleNMContainerStatus(status, nodeId);
        }
      }
    }

    // Update node's labels to RM's NodeLabelManager.
    Set<String> nodeLabels = NodeLabelsUtils.convertToStringSet(
        request.getNodeLabels());
    if (isDistributedNodeLabelsConf && nodeLabels != null) {
      try {
        updateNodeLabelsFromNMReport(nodeLabels, nodeId);
        response.setAreNodeLabelsAcceptedByRM(true);
      } catch (IOException ex) {
        // Ensure the exception is captured in the response
        response.setDiagnosticsMessage(ex.getMessage());
        response.setAreNodeLabelsAcceptedByRM(false);
      }
    } else if (isDelegatedCentralizedNodeLabelsConf) {
      this.rmContext.getRMDelegatedNodeLabelsUpdater().updateNodeLabels(nodeId);
    }

    StringBuilder message = new StringBuilder();
    message.append("NodeManager from node ").append(host).append("(cmPort: ")
        .append(cmPort).append(" httpPort: ");
    message.append(httpPort).append(") ")
        .append("registered with capability: ").append(capability);
    message.append(", assigned nodeId ").append(nodeId);
    if (response.getAreNodeLabelsAcceptedByRM()) {
      message.append(", node labels { ").append(
          StringUtils.join(",", nodeLabels) + " } ");
    }
    LOG.info(message.toString());
    response.setNodeAction(NodeAction.NORMAL);
    response.setRMIdentifier(ResourceManager.getClusterTimeStamp());
    response.setRMVersion(YarnVersionInfo.getVersion());
    return response;
  }

  @SuppressWarnings("unchecked")
  @Override
  public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
      throws YarnException, IOException {

    NodeStatus remoteNodeStatus = request.getNodeStatus();
    /**
     * Here is the node heartbeat sequence...
     * 1. Check if it's a valid (i.e. not excluded) node
     * 2. Check if it's a registered node
     * 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
     * 4. Send healthStatus to RMNode
     * 5. Update node's labels if distributed Node Labels configuration is enabled
     */

    NodeId nodeId = remoteNodeStatus.getNodeId();

    // 1. Check if it's a valid (i.e. not excluded) node, if not, see if it is
    // in decommissioning.
    if (!this.nodesListManager.isValidNode(nodeId.getHost())
        && !isNodeInDecommissioning(nodeId)) {
      String message =
          "Disallowed NodeManager nodeId: " + nodeId + " hostname: "
              + nodeId.getHost();
      LOG.info(message);
      return YarnServerBuilderUtils.newNodeHeartbeatResponse(
          NodeAction.SHUTDOWN, message);
    }

    // 2. Check if it's a registered node
    RMNode rmNode = this.rmContext.getRMNodes().get(nodeId);
    if (rmNode == null) {
      /* node does not exist */
      String message = "Node not found resyncing " + remoteNodeStatus.getNodeId();
      LOG.info(message);
      return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
          message);
    }

    // Send ping
    this.nmLivelinessMonitor.receivedPing(nodeId);

    // 3. Check if it's a 'fresh' heartbeat i.e. not duplicate heartbeat
    NodeHeartbeatResponse lastNodeHeartbeatResponse = rmNode.getLastNodeHeartBeatResponse();
    if (remoteNodeStatus.getResponseId() + 1 == lastNodeHeartbeatResponse
        .getResponseId()) {
      // Duplicate: NM re-sent the previous heartbeat; replay the last response.
      LOG.info("Received duplicate heartbeat from node "
          + rmNode.getNodeAddress()+ " responseId=" + remoteNodeStatus.getResponseId());
      return lastNodeHeartbeatResponse;
    } else if (remoteNodeStatus.getResponseId() + 1 < lastNodeHeartbeatResponse
        .getResponseId()) {
      String message =
          "Too far behind rm response id:"
              + lastNodeHeartbeatResponse.getResponseId() + " nm response id:"
              + remoteNodeStatus.getResponseId();
      LOG.info(message);
      // TODO: Just sending reboot is not enough. Think more.
      this.rmContext.getDispatcher().getEventHandler().handle(
          new RMNodeEvent(nodeId, RMNodeEventType.REBOOTING));
      return YarnServerBuilderUtils.newNodeHeartbeatResponse(NodeAction.RESYNC,
          message);
    }

    // Heartbeat response
    NodeHeartbeatResponse nodeHeartBeatResponse = YarnServerBuilderUtils
        .newNodeHeartbeatResponse(lastNodeHeartbeatResponse.
            getResponseId() + 1, NodeAction.NORMAL, null, null, null, null,
            nextHeartBeatInterval);
    rmNode.updateNodeHeartbeatResponseForCleanup(nodeHeartBeatResponse);
    rmNode.updateNodeHeartbeatResponseForContainersDecreasing(
        nodeHeartBeatResponse);

    populateKeys(request, nodeHeartBeatResponse);

    ConcurrentMap<ApplicationId, ByteBuffer> systemCredentials =
        rmContext.getSystemCredentialsForApps();
    if (!systemCredentials.isEmpty()) {
      nodeHeartBeatResponse.setSystemCredentialsForApps(systemCredentials);
    }

    // 4. Send status to RMNode, saving the latest response.
    RMNodeStatusEvent nodeStatusEvent =
        new RMNodeStatusEvent(nodeId, remoteNodeStatus, nodeHeartBeatResponse);
    if (request.getLogAggregationReportsForApps() != null
        && !request.getLogAggregationReportsForApps().isEmpty()) {
      nodeStatusEvent.setLogAggregationReportsForApps(request
          .getLogAggregationReportsForApps());
    }
    this.rmContext.getDispatcher().getEventHandler().handle(nodeStatusEvent);

    // 5. Update node's labels to RM's NodeLabelManager.
    if (isDistributedNodeLabelsConf && request.getNodeLabels() != null) {
      try {
        updateNodeLabelsFromNMReport(
            NodeLabelsUtils.convertToStringSet(request.getNodeLabels()),
            nodeId);
        nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(true);
      } catch (IOException ex) {
        //ensure the error message is captured and sent across in response
        nodeHeartBeatResponse.setDiagnosticsMessage(ex.getMessage());
        nodeHeartBeatResponse.setAreNodeLabelsAcceptedByRM(false);
      }
    }

    return nodeHeartBeatResponse;
  }

  /**
   * Check if node in decommissioning state.
* @param nodeId */ private boolean isNodeInDecommissioning(NodeId nodeId) { RMNode rmNode = this.rmContext.getRMNodes().get(nodeId); if (rmNode != null && rmNode.getState().equals(NodeState.DECOMMISSIONING)) { return true; } return false; } @SuppressWarnings("unchecked") @Override public UnRegisterNodeManagerResponse unRegisterNodeManager( UnRegisterNodeManagerRequest request) throws YarnException, IOException { UnRegisterNodeManagerResponse response = recordFactory .newRecordInstance(UnRegisterNodeManagerResponse.class); NodeId nodeId = request.getNodeId(); RMNode rmNode = this.rmContext.getRMNodes().get(nodeId); if (rmNode == null) { LOG.info("Node not found, ignoring the unregister from node id : " + nodeId); return response; } LOG.info("Node with node id : " + nodeId + " has shutdown, hence unregistering the node."); this.nmLivelinessMonitor.unregister(nodeId); this.rmContext.getDispatcher().getEventHandler() .handle(new RMNodeEvent(nodeId, RMNodeEventType.SHUTDOWN)); return response; } private void updateNodeLabelsFromNMReport(Set<String> nodeLabels, NodeId nodeId) throws IOException { try { Map<NodeId, Set<String>> labelsUpdate = new HashMap<NodeId, Set<String>>(); labelsUpdate.put(nodeId, nodeLabels); this.rmContext.getNodeLabelManager().replaceLabelsOnNode(labelsUpdate); if (LOG.isDebugEnabled()) { LOG.debug("Node Labels {" + StringUtils.join(",", nodeLabels) + "} from Node " + nodeId + " were Accepted from RM"); } } catch (IOException ex) { StringBuilder errorMessage = new StringBuilder(); errorMessage.append("Node Labels {") .append(StringUtils.join(",", nodeLabels)) .append("} reported from NM with ID ").append(nodeId) .append(" was rejected from RM with exception message as : ") .append(ex.getMessage()); LOG.error(errorMessage, ex); throw new IOException(errorMessage.toString(), ex); } } private void populateKeys(NodeHeartbeatRequest request, NodeHeartbeatResponse nodeHeartBeatResponse) { // Check if node's masterKey needs to be updated and if the 
currentKey has // roller over, send it across // ContainerTokenMasterKey MasterKey nextMasterKeyForNode = this.containerTokenSecretManager.getNextKey(); if (nextMasterKeyForNode != null && (request.getLastKnownContainerTokenMasterKey().getKeyId() != nextMasterKeyForNode.getKeyId())) { nodeHeartBeatResponse.setContainerTokenMasterKey(nextMasterKeyForNode); } // NMTokenMasterKey nextMasterKeyForNode = this.nmTokenSecretManager.getNextKey(); if (nextMasterKeyForNode != null && (request.getLastKnownNMTokenMasterKey().getKeyId() != nextMasterKeyForNode.getKeyId())) { nodeHeartBeatResponse.setNMTokenMasterKey(nextMasterKeyForNode); } } /** * resolving the network topology. * @param hostName the hostname of this node. * @return the resolved {@link Node} for this nodemanager. */ public static Node resolve(String hostName) { return RackResolver.resolve(hostName); } void refreshServiceAcls(Configuration configuration, PolicyProvider policyProvider) { this.server.refreshServiceAclWithLoadedConfiguration(configuration, policyProvider); } @VisibleForTesting public Server getServer() { return this.server; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.android.project.api; import java.awt.Image; import java.beans.PropertyChangeListener; import java.util.ArrayList; import java.util.Enumeration; import java.util.List; import javax.swing.Action; import javax.swing.Icon; import javax.swing.ImageIcon; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import nbandroid.gradle.spi.ModelRefresh; import nbandroid.gradle.spi.RootGoalsNavigatorHint; import org.gradle.tooling.model.gradle.GradleBuild; import org.netbeans.api.annotations.common.StaticResource; import org.netbeans.api.project.FileOwnerQuery; import org.netbeans.api.project.Project; import org.netbeans.api.project.ProjectInformation; import org.netbeans.modules.android.project.actions.RootProjectActionProvider; import static org.netbeans.modules.android.project.api.NbAndroidProject.RP; import org.netbeans.modules.android.project.api.nodes.MultiNodeFactory; import org.netbeans.modules.android.project.api.nodes.MultiNodeFactoryProvider; import org.netbeans.modules.android.project.api.nodes.NodeFactory; import org.netbeans.modules.android.project.properties.AndroidRootCustomizerProvider; import 
org.netbeans.modules.android.project.properties.actions.ConfigurationsProjectAction; import org.netbeans.spi.project.ActionProvider; import org.netbeans.spi.project.ProjectState; import org.netbeans.spi.project.ui.LogicalViewProvider; import org.netbeans.spi.project.ui.support.CommonProjectActions; import org.netbeans.spi.project.ui.support.ProjectSensitiveActions; import org.openide.filesystems.FileObject; import org.openide.loaders.DataFolder; import org.openide.loaders.DataObjectNotFoundException; import org.openide.nodes.AbstractNode; import org.openide.nodes.Children; import org.openide.nodes.FilterNode; import org.openide.nodes.Node; import org.openide.util.Exceptions; import org.openide.util.ImageUtilities; import org.openide.util.Lookup; import org.openide.util.lookup.Lookups; import org.openide.util.lookup.ProxyLookup; /** * * @author arsi */ public class NbAndroidRootProjectImpl extends NbAndroidProject { @StaticResource() public static final String PROJECT_ICON = "org/netbeans/modules/android/api/root_project.png"; private final ConfigurationsProjectAction configurationsProjectAction ; public NbAndroidRootProjectImpl(FileObject projectDirectory, ProjectState ps) { super(projectDirectory, ps); ic.add((ProjectInformation) new Info()); configurationsProjectAction = new ConfigurationsProjectAction(this); } @Override protected void registerLookup() { ic.add(new NbAndroidProjectConfigurationProvider(auxiliaryProperties)); ic.add(new AndroidRootCustomizerProvider(this)); } @Override protected LogicalViewProvider getLogicalViewProvider() { return new CustomerProjectLogicalView(); } @Override protected ActionProvider getProjectActionProvider() { return new RootProjectActionProvider(this); } @Override protected Class[] getGradleModels() { return new Class[]{GradleBuild.class}; } @Override public ModelRefresh getModelRefresh() { return new ModelRefresh() { @Override public void refreshModels() { RP.execute(NbAndroidRootProjectImpl.this); RP.execute(new Runnable() { 
@Override public void run() { Enumeration<? extends FileObject> children = getProjectDirectory().getChildren(false); while (children.hasMoreElements()) { FileObject fo = children.nextElement(); if (fo.isFolder()) { Project owner = FileOwnerQuery.getOwner(fo); if (owner instanceof NbAndroidProjectImpl) { RP.execute((NbAndroidProjectImpl) owner); } } } } }); } }; } public final class CustomerProjectLogicalView implements LogicalViewProvider { @Override public Node createLogicalView() { FileObject projectDirectory = NbAndroidRootProjectImpl.this.getProjectDirectory(); DataFolder projectFolder = DataFolder.findFolder(projectDirectory); Node nodeOfProjectFolder = projectFolder.getNodeDelegate(); try { return new ProjectNode(nodeOfProjectFolder); } catch (DataObjectNotFoundException ex) { Exceptions.printStackTrace(ex); return new AbstractNode(Children.LEAF); } } @Override public Node findPath(Node node, Object o) { return null; } } private final class ProjectNode extends FilterNode { public ProjectNode(Node node) throws DataObjectNotFoundException { super(node, new ProjectNodes(), new ProxyLookup( new Lookup[]{ Lookups.singleton(NbAndroidRootProjectImpl.this), node.getLookup(), Lookups.singleton(new RootGoalsNavigatorHint()), lookupModels })); } @Override public Action[] getActions(boolean arg0) { return new Action[]{ ProjectSensitiveActions.projectCommandAction(ActionProvider.COMMAND_REBUILD, "Clean and Build", null), ProjectSensitiveActions.projectCommandAction(ActionProvider.COMMAND_CLEAN, "Clean", null), ProjectSensitiveActions.projectCommandAction(ActionProvider.COMMAND_BUILD, "Buld", null), configurationsProjectAction, CommonProjectActions.setAsMainProjectAction(), CommonProjectActions.customizeProjectAction(), CommonProjectActions.closeProjectAction() }; } @Override public Image getIcon(int type) { return ImageUtilities.loadImage(PROJECT_ICON); } @Override public Image getOpenedIcon(int type) { return getIcon(type); } @Override public String getDisplayName() { 
return NbAndroidRootProjectImpl.this.getProjectDirectory().getName(); } } public class ProjectNodes extends Children.Keys<Node> implements ChangeListener { private final List<MultiNodeFactory> multiNodeFactories = new ArrayList<>(); private final List<Node> staticNodes = new ArrayList<>(); public ProjectNodes() { super(true); List<MultiNodeFactoryProvider> factoryProviders = MultiNodeFactoryProvider.findAllForRoot(); for (MultiNodeFactoryProvider factoryProvider : factoryProviders) { MultiNodeFactory createMultiNodeFactory = factoryProvider.createMultiNodeFactory(NbAndroidRootProjectImpl.this); multiNodeFactories.add(createMultiNodeFactory); createMultiNodeFactory.addChangeListener(this); } List<NodeFactory> findAll = NodeFactory.findAllForRoot(); for (int i = 0; i < findAll.size(); i++) { NodeFactory factory = findAll.get(i); Node node = factory.createNode(NbAndroidRootProjectImpl.this); if (node != null) { staticNodes.add(node); } } refreshNodes(); } private void refreshNodes() { List<Node> childrens = new ArrayList<>(); for (MultiNodeFactory multiNodeFactory : multiNodeFactories) { childrens.addAll(multiNodeFactory.createNodes()); } childrens.addAll(staticNodes); setKeys(childrens); } @Override protected Node[] createNodes(Node key) { return new Node[]{key}; } @Override public void stateChanged(ChangeEvent e) { refreshNodes(); } } public final class Info implements ProjectInformation { @Override public Icon getIcon() { return new ImageIcon(ImageUtilities.loadImage(PROJECT_ICON)); } @Override public String getName() { return getProjectDirectory().getName(); } @Override public String getDisplayName() { return getName(); } @Override public void addPropertyChangeListener(PropertyChangeListener pcl) { //do nothing, won't change } @Override public void removePropertyChangeListener(PropertyChangeListener pcl) { //do nothing, won't change } @Override public Project getProject() { return NbAndroidRootProjectImpl.this; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.pulsar.metadata.bookkeeper;

import java.io.File;
import java.io.IOException;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.bookkeeper.bookie.Bookie;
import org.apache.bookkeeper.client.BookKeeper;
import org.apache.bookkeeper.common.allocator.PoolingPolicy;
import org.apache.bookkeeper.conf.ClientConfiguration;
import org.apache.bookkeeper.conf.ServerConfiguration;
import org.apache.bookkeeper.net.BookieId;
import org.apache.bookkeeper.proto.BookieServer;
import org.apache.bookkeeper.replication.AutoRecoveryMain;
import org.apache.bookkeeper.replication.ReplicationException.CompatibilityException;
import org.apache.bookkeeper.replication.ReplicationException.UnavailableException;
import org.apache.bookkeeper.stats.NullStatsLogger;
import org.apache.bookkeeper.util.IOUtils;
import org.apache.bookkeeper.util.PortManager;
import org.apache.commons.io.FileUtils;
import org.apache.pulsar.metadata.api.MetadataStoreConfig;
import org.apache.pulsar.metadata.api.extended.MetadataStoreExtended;

/**
 * A class runs several bookie servers for testing. Backed by a Pulsar
 * {@link MetadataStoreExtended} instead of ZooKeeper; each bookie gets its
 * own temp journal/ledger directory that is removed on {@link #close()}.
 */
@Slf4j
public class BKCluster implements AutoCloseable {

    // Metadata service related variables
    private final String metadataServiceUri;
    @Getter
    private final MetadataStoreExtended store;

    // BookKeeper related variables
    private final List<File> tmpDirs = new ArrayList<>();
    private final List<BookieServer> bs = new ArrayList<>();
    private final List<ServerConfiguration> bsConfs = new ArrayList<>();

    // Base configs cloned for every bookie/client created by this cluster.
    protected final ServerConfiguration baseConf = newBaseServerConfiguration();
    protected final ClientConfiguration baseClientConf = newBaseClientConfiguration();

    /**
     * Creates the cluster: connects the metadata store, wires the
     * Pulsar metadata drivers via system properties, then starts
     * {@code numBookies} bookies. Ordering matters: properties must be set
     * before {@link #startBKCluster(int)} boots the first bookie.
     */
    public BKCluster(String metadataServiceUri, int numBookies) throws Exception {
        this.metadataServiceUri = metadataServiceUri;
        this.store = MetadataStoreExtended.create(metadataServiceUri, MetadataStoreConfig.builder().build());
        baseConf.setJournalRemovePagesFromCache(false);
        baseConf.setProperty(AbstractMetadataDriver.METADATA_STORE_INSTANCE, store);
        baseClientConf.setProperty(AbstractMetadataDriver.METADATA_STORE_INSTANCE, store);
        System.setProperty("bookkeeper.metadata.bookie.drivers", PulsarMetadataBookieDriver.class.getName());
        System.setProperty("bookkeeper.metadata.client.drivers", PulsarMetadataClientDriver.class.getName());
        startBKCluster(numBookies);
    }

    private final Map<BookieServer, AutoRecoveryMain> autoRecoveryProcesses = new HashMap<>();

    @Getter
    boolean isAutoRecoveryEnabled = false;

    /**
     * Stops bookies, removes temp dirs, then closes the metadata store.
     * Bookie/dir failures are logged and swallowed so the store is always
     * closed.
     */
    @Override
    public void close() throws Exception {
        // NOTE(review): 'failed' is never read or updated — looks like dead
        // code left from an earlier error-tracking scheme.
        boolean failed = false;

        // stop bookkeeper service
        try {
            stopBKCluster();
        } catch (Exception e) {
            log.error("Got Exception while trying to stop BKCluster", e);
        }
        // cleanup temp dirs
        try {
            cleanupTempDirs();
        } catch (Exception e) {
            log.error("Got Exception while trying to cleanupTempDirs", e);
        }

        this.store.close();
    }

    // Creates a temp dir and records it for deletion in cleanupTempDirs().
    private File createTempDir(String prefix, String suffix) throws IOException {
        File dir = IOUtils.createTempDir(prefix, suffix);
        tmpDirs.add(dir);
        return dir;
    }

    /**
     * Start cluster. Also, starts the auto recovery process for each bookie, if
     * isAutoRecoveryEnabled is true.
     *
     * @throws Exception
     */
    private void startBKCluster(int numBookies) throws Exception {
        // Initialize the ledger namespace in the metadata store before any
        // bookie registers itself.
        PulsarRegistrationManager rm = new PulsarRegistrationManager(store, "/ledgers", baseConf);
        rm.initNewCluster();

        baseConf.setMetadataServiceUri("metadata-store:" + metadataServiceUri);
        baseClientConf.setMetadataServiceUri("metadata-store:" + metadataServiceUri);

        // Create Bookie Servers (B1, B2, B3)
        for (int i = 0; i < numBookies; i++) {
            startNewBookie();
        }
    }

    // Creates a BookKeeper client wired to this cluster's metadata store.
    public BookKeeper newClient() throws Exception {
        return new BookKeeper(baseClientConf);
    }

    /**
     * Stop cluster. Also, stops all the auto recovery processes for the bookie
     * cluster, if isAutoRecoveryEnabled is true.
     *
     * @throws Exception
     */
    protected void stopBKCluster() throws Exception {
        for (BookieServer server : bs) {
            server.shutdown();
            AutoRecoveryMain autoRecovery = autoRecoveryProcesses.get(server);
            if (autoRecovery != null && isAutoRecoveryEnabled()) {
                autoRecovery.shutdown();
                log.debug("Shutdown auto recovery for bookieserver:" + server.getBookieId());
            }
        }
        bs.clear();
    }

    protected void cleanupTempDirs() throws Exception {
        for (File f : tmpDirs) {
            FileUtils.deleteDirectory(f);
        }
    }

    // Builds a per-bookie config with a fresh temp dir. An explicit free port
    // is only reserved when local transport is on or ephemeral ports are
    // disallowed; otherwise port 0 lets the OS pick.
    private ServerConfiguration newServerConfiguration() throws Exception {
        File f = createTempDir("bookie", "test");

        int port;
        if (baseConf.isEnableLocalTransport() || !baseConf.getAllowEphemeralPorts()) {
            port = PortManager.nextFreePort();
        } else {
            port = 0;
        }
        return newServerConfiguration(port, f, new File[]{f});
    }

    // NOTE(review): currently unused within this class.
    private ClientConfiguration newClientConfiguration() {
        return new ClientConfiguration(baseConf);
    }

    // Clones baseConf and points it at the given port and directories
    // (journal and ledgers share the same temp dir in the default path).
    private ServerConfiguration newServerConfiguration(int port, File journalDir, File[] ledgerDirs) {
        ServerConfiguration conf = new ServerConfiguration(baseConf);
        conf.setBookiePort(port);
        conf.setJournalDirName(journalDir.getPath());
        String[] ledgerDirNames = new String[ledgerDirs.length];
        for (int i = 0; i < ledgerDirs.length; i++) {
            ledgerDirNames[i] = ledgerDirs[i].getPath();
        }
        conf.setLedgerDirNames(ledgerDirNames);
        conf.setEnableTaskExecutionStats(true);
        conf.setAllocatorPoolingPolicy(PoolingPolicy.UnpooledHeap);
        return conf;
    }

    protected void stopAllBookies() throws Exception {
        stopAllBookies(true);
    }

    // NOTE(review): the 'shutdownClient' flag is currently ignored — no
    // client is tracked here. Confirm against callers before removing it.
    protected void stopAllBookies(boolean shutdownClient) throws Exception {
        for (BookieServer server : bs) {
            server.shutdown();
        }
        bsConfs.clear();
        bs.clear();
    }

    // Restarts bookies from the retained configurations.
    protected void startAllBookies() throws Exception {
        for (ServerConfiguration conf : bsConfs) {
            bs.add(startBookie(conf));
        }
    }

    /**
     * Helper method to startup a new bookie server with the indicated port
     * number. Also, starts the auto recovery process, if the
     * isAutoRecoveryEnabled is set true.
     *
     * @return the port the new bookie is listening on
     * @throws IOException
     */
    public int startNewBookie() throws Exception {
        ServerConfiguration conf = newServerConfiguration();
        bsConfs.add(conf);
        log.info("Starting new bookie on port: {}", conf.getBookiePort());
        BookieServer server = startBookie(conf);
        bs.add(server);
        return server.getLocalAddress().getPort();
    }

    /**
     * Helper method to startup a bookie server using a configuration object.
     * Also, starts the auto recovery process if isAutoRecoveryEnabled is true.
     *
     * @param conf
     *            Server Configuration Object
     *
     */
    protected BookieServer startBookie(ServerConfiguration conf)
            throws Exception {
        BookieServer server = new BookieServer(conf, NullStatsLogger.INSTANCE, null);
        BookieId address = Bookie.getBookieId(conf);
        server.start();

        // Wait for up to 30 seconds for the bookie to start
        // (3000 polls x 10 ms).
        for (int i = 0; i < 3000; i++) {
            if (server.isRunning()) {
                break;
            }
            Thread.sleep(10);
        }

        if (!server.isRunning()) {
            throw new RuntimeException("Bookie failed to start within timeout period");
        }

        log.info("New bookie '{}' has been created.", address);

        try {
            startAutoRecovery(server, conf);
        } catch (CompatibilityException ce) {
            log.error("Exception while starting AutoRecovery!", ce);
        } catch (UnavailableException ue) {
            log.error("Exception while starting AutoRecovery!", ue);
        }
        return server;
    }

    // Starts an AutoRecovery daemon for the bookie when enabled; the process
    // is tracked so stopBKCluster() can shut it down.
    private void startAutoRecovery(BookieServer bserver,
                                   ServerConfiguration conf) throws Exception {
        if (isAutoRecoveryEnabled()) {
            AutoRecoveryMain autoRecoveryProcess = new AutoRecoveryMain(conf);
            autoRecoveryProcess.start();
            autoRecoveryProcesses.put(bserver, autoRecoveryProcess);
            log.debug("Starting Auditor Recovery for the bookie:" + bserver.getBookieId());
        }
    }

    // Default server configuration tuned for fast, self-contained tests
    // (small caches, loopback interface, ephemeral ports).
    private static ServerConfiguration newBaseServerConfiguration() {
        ServerConfiguration confReturn = new ServerConfiguration();
        confReturn.setTLSEnabledProtocols("TLSv1.2,TLSv1.1");
        confReturn.setJournalFlushWhenQueueEmpty(true);
        confReturn.setJournalFormatVersionToWrite(5);
        confReturn.setAllowEphemeralPorts(true);
        confReturn.setJournalWriteData(false);
        confReturn.setBookiePort(0);
        confReturn.setGcWaitTime(1000L);
        confReturn.setDiskUsageThreshold(0.999F);
        confReturn.setDiskUsageWarnThreshold(0.99F);
        confReturn.setAllocatorPoolingPolicy(PoolingPolicy.UnpooledHeap);
        confReturn.setProperty("dbStorage_writeCacheMaxSizeMb", 4);
        confReturn.setProperty("dbStorage_readAheadCacheMaxSizeMb", 4);
        setLoopbackInterfaceAndAllowLoopback(confReturn);
        return confReturn;
    }

    public static ClientConfiguration newBaseClientConfiguration() {
        ClientConfiguration clientConfiguration = new ClientConfiguration();
        clientConfiguration.setTLSEnabledProtocols("TLSv1.2,TLSv1.1");
        return clientConfiguration;
    }

    // Finds the name of the first loopback interface, or null when none can
    // be determined (callers treat null as "no explicit interface").
    private static String getLoopbackInterfaceName() {
        try {
            Enumeration<NetworkInterface> nifs = NetworkInterface.getNetworkInterfaces();
            Iterator<NetworkInterface> var1 = Collections.list(nifs).iterator();

            while (var1.hasNext()) {
                NetworkInterface nif = var1.next();
                if (nif.isLoopback()) {
                    return nif.getName();
                }
            }
        } catch (SocketException var3) {
            log.warn("Exception while figuring out loopback interface. Will use null.", var3);
            return null;
        }

        log.warn("Unable to deduce loopback interface. Will use null");
        return null;
    }

    private static ServerConfiguration setLoopbackInterfaceAndAllowLoopback(ServerConfiguration serverConf) {
        serverConf.setListeningInterface(getLoopbackInterfaceName());
        serverConf.setAllowLoopback(true);
        return serverConf;
    }
}
package net.bytebuddy.implementation.attribute;

import net.bytebuddy.ClassFileVersion;
import net.bytebuddy.description.annotation.AnnotationDescription;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.method.MethodList;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.loading.ByteArrayClassLoader;
import net.bytebuddy.dynamic.loading.PackageDefinitionStrategy;
import net.bytebuddy.test.utility.MockitoRule;
import net.bytebuddy.test.utility.ObjectPropertyAssertion;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.mockito.Mock;
import org.objectweb.asm.AnnotationVisitor;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import java.lang.annotation.Annotation;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.security.AccessController;
import java.security.ProtectionDomain;
import java.util.Collections;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.*;

/**
 * Tests for {@code AnnotationAppender.Default}: annotations with different
 * retention policies and value shapes are appended to a generated class, the
 * class is loaded, and the reflected annotations are checked.
 */
public class AnnotationAppenderDefaultTest {

    private static final ProtectionDomain DEFAULT_PROTECTION_DOMAIN = null;

    // No automatic frame/maxs computation when writing the test class.
    private static final int ASM_MANUAL = 0;

    // Fully-qualified name of the class generated per test.
    private static final String BAR = "net.bytebuddy.test.Bar";

    private static final String FOOBAR = "foobar";

    @Rule
    public TestRule mockitoRule = new MockitoRule(this);

    @Mock
    private AnnotationAppender.Target target;

    @Mock
    private AnnotationAppender.ValueFilter valueFilter;

    private AnnotationAppender annotationAppender;

    @Before
    public void setUp() throws Exception {
        annotationAppender = new AnnotationAppender.Default(target, valueFilter);
    }

    @Test
    public void testNoArgumentAnnotation() throws Exception {
        Class<?> bar = makeTypeWithAnnotation(new Foo.Instance());
        assertThat(bar.getAnnotations().length, is(1));
        assertThat(bar.isAnnotationPresent(Foo.class), is(true));
    }

    // SOURCE retention must never be visible at runtime.
    @Test
    public void testNoArgumentAnnotationSourceCodeRetention() throws Exception {
        Class<?> bar = makeTypeWithAnnotation(new FooSourceCodeRetention.Instance());
        assertThat(bar.getAnnotations().length, is(0));
    }

    // CLASS retention is written to the class file but not runtime-visible.
    @Test
    public void testNoArgumentAnnotationByteCodeRetention() throws Exception {
        Class<?> bar = makeTypeWithAnnotation(new FooByteCodeRetention.Instance());
        assertThat(bar.getAnnotations().length, is(0));
    }

    // No @Retention defaults to CLASS retention: not runtime-visible.
    @Test
    public void testNoArgumentAnnotationNoRetention() throws Exception {
        Class<?> bar = makeTypeWithAnnotation(new FooNoRetention.Instance());
        assertThat(bar.getAnnotations().length, is(0));
    }

    @Test
    public void testSingleArgumentAnnotation() throws Exception {
        Class<?> bar = makeTypeWithAnnotation(new Qux.Instance(FOOBAR));
        assertThat(bar.getAnnotations().length, is(1));
        assertThat(bar.isAnnotationPresent(Qux.class), is(true));
        assertThat(bar.getAnnotation(Qux.class).value(), is(FOOBAR));
    }

    // Covers every supported member kind: String, array, nested annotation,
    // enum constant and Class value.
    @Test
    public void testMultipleArgumentAnnotation() throws Exception {
        int[] array = {2, 3, 4};
        Class<?> bar = makeTypeWithAnnotation(new Baz.Instance(FOOBAR, array, new Foo.Instance(), Baz.Enum.VALUE, Void.class));
        assertThat(bar.getAnnotations().length, is(1));
        assertThat(bar.isAnnotationPresent(Baz.class), is(true));
        assertThat(bar.getAnnotation(Baz.class).value(), is(FOOBAR));
        assertThat(bar.getAnnotation(Baz.class).array(), is(array));
        assertThat(bar.getAnnotation(Baz.class).annotation(), is((Foo) new Foo.Instance()));
        assertThat(bar.getAnnotation(Baz.class).enumeration(), is(Baz.Enum.VALUE));
        assertEquals(Void.class, bar.getAnnotation(Baz.class).type());
    }

    /**
     * Generates a class named {@link #BAR} carrying the given annotation,
     * runs the appender against it, verifies the expected interactions for
     * the annotation's visibility, then loads and returns the class.
     */
    private Class<?> makeTypeWithAnnotation(Annotation annotation) throws Exception {
        when(valueFilter.isRelevant(any(AnnotationDescription.class), any(MethodDescription.InDefinedShape.class))).thenReturn(true);
        ClassWriter classWriter = new ClassWriter(ASM_MANUAL);
        classWriter.visit(ClassFileVersion.forCurrentJavaVersion().getVersion(),
                Opcodes.ACC_PUBLIC,
                BAR.replace('.', '/'),
                null,
                Type.getInternalName(Object.class),
                null);
        AnnotationVisitor annotationVisitor = classWriter.visitAnnotation(Type.getDescriptor(annotation.annotationType()), true);
        when(target.visit(any(String.class), anyBoolean())).thenReturn(annotationVisitor);
        AnnotationDescription annotationDescription = AnnotationDescription.ForLoadedAnnotation.of(annotation);
        AnnotationAppender.AnnotationVisibility annotationVisibility = AnnotationAppender.AnnotationVisibility.of(annotationDescription);
        annotationAppender.append(annotationDescription, annotationVisibility);
        switch (annotationVisibility) {
            case RUNTIME:
            case CLASS_FILE:
                // Visible annotations must reach the target exactly once, with
                // the 'visible' flag set only for RUNTIME retention.
                verify(target).visit(Type.getDescriptor(annotation.annotationType()),
                        annotationVisibility == AnnotationAppender.AnnotationVisibility.RUNTIME);
                verifyNoMoreInteractions(target);
                for (MethodDescription.InDefinedShape methodDescription : annotationDescription.getAnnotationType().getDeclaredMethods()) {
                    verify(valueFilter).isRelevant(annotationDescription, methodDescription);
                }
                verifyNoMoreInteractions(valueFilter);
                break;
            case INVISIBLE:
                // SOURCE retention: the appender must not touch the target.
                verifyZeroInteractions(target);
                break;
            default:
                fail("Unknown annotation visibility");
        }
        classWriter.visitEnd();
        Class<?> bar = new ByteArrayClassLoader(getClass().getClassLoader(),
                Collections.singletonMap(BAR, classWriter.toByteArray()),
                DEFAULT_PROTECTION_DOMAIN,
                AccessController.getContext(),
                ByteArrayClassLoader.PersistenceHandler.LATENT,
                PackageDefinitionStrategy.NoOp.INSTANCE).loadClass(BAR);
        assertThat(bar.getName(), is(BAR));
        assertEquals(Object.class, bar.getSuperclass());
        return bar;
    }

    // When the filter rejects every member, the appender should only open and
    // close the annotation without writing any values.
    @Test
    @SuppressWarnings("unchecked")
    public void testSkipValues() throws Exception {
        when(valueFilter.isRelevant(any(AnnotationDescription.class), any(MethodDescription.InDefinedShape.class))).thenReturn(false);
        MethodDescription.InDefinedShape methodDescription = mock(MethodDescription.InDefinedShape.class);
        TypeDescription annotationType = mock(TypeDescription.class);
        when(annotationType.getDeclaredMethods())
                .thenReturn((MethodList) new MethodList.Explicit<MethodDescription>(Collections.singletonList(methodDescription)));
        AnnotationDescription annotationDescription = mock(AnnotationDescription.class);
        when(annotationDescription.getAnnotationType()).thenReturn(annotationType);
        AnnotationVisitor annotationVisitor = mock(AnnotationVisitor.class);
        when(target.visit(anyString(), anyBoolean())).thenReturn(annotationVisitor);
        annotationAppender.append(annotationDescription, AnnotationAppender.AnnotationVisibility.RUNTIME);
        verify(valueFilter).isRelevant(annotationDescription, methodDescription);
        verifyNoMoreInteractions(valueFilter);
        verify(annotationVisitor).visitEnd();
        verifyNoMoreInteractions(annotationVisitor);
    }

    @Test
    public void testObjectProperties() throws Exception {
        ObjectPropertyAssertion.of(AnnotationAppender.Default.class).apply();
        ObjectPropertyAssertion.of(AnnotationAppender.AnnotationVisibility.class).apply();
    }

    // Fixture annotation: RUNTIME retention, no members.
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Foo {

        class Instance implements Foo {

            @Override
            public Class<? extends Annotation> annotationType() {
                return Foo.class;
            }
        }
    }

    // Fixture annotation: SOURCE retention (never written to the class file).
    @Retention(RetentionPolicy.SOURCE)
    public @interface FooSourceCodeRetention {

        class Instance implements FooSourceCodeRetention {

            @Override
            public Class<? extends Annotation> annotationType() {
                return FooSourceCodeRetention.class;
            }
        }
    }

    // Fixture annotation: CLASS retention (in class file, not at runtime).
    @Retention(RetentionPolicy.CLASS)
    public @interface FooByteCodeRetention {

        class Instance implements FooByteCodeRetention {

            @Override
            public Class<? extends Annotation> annotationType() {
                return FooByteCodeRetention.class;
            }
        }
    }

    // Fixture annotation without @Retention: defaults to CLASS retention.
    public @interface FooNoRetention {

        class Instance implements FooNoRetention {

            @Override
            public Class<? extends Annotation> annotationType() {
                return FooNoRetention.class;
            }
        }
    }

    // Fixture annotation with a single String member.
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Qux {

        String value();

        class Instance implements Qux {

            private final String value;

            public Instance(String value) {
                this.value = value;
            }

            @Override
            public String value() {
                return value;
            }

            @Override
            public Class<? extends Annotation> annotationType() {
                return Qux.class;
            }
        }
    }

    // Fixture annotation exercising String, array, annotation, enum and
    // Class members.
    @Retention(RetentionPolicy.RUNTIME)
    public @interface Baz {

        String value();

        int[] array();

        Foo annotation();

        Enum enumeration();

        Class<?> type();

        enum Enum {
            VALUE
        }

        class Instance implements Baz {

            private final String value;

            private final int[] array;

            private final Foo annotation;

            private final Enum enumeration;

            private final Class<?> type;

            public Instance(String value, int[] array, Foo annotation, Enum enumeration, Class<?> type) {
                this.value = value;
                this.array = array;
                this.annotation = annotation;
                this.enumeration = enumeration;
                this.type = type;
            }

            @Override
            public String value() {
                return value;
            }

            @Override
            public int[] array() {
                return array;
            }

            @Override
            public Foo annotation() {
                return annotation;
            }

            @Override
            public Enum enumeration() {
                return enumeration;
            }

            @Override
            public Class<?> type() {
                return type;
            }

            @Override
            public Class<? extends Annotation> annotationType() {
                return Baz.class;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.wings.portal.controllers;

import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.regex.Pattern;

import javax.servlet.ServletContext;
import javax.ws.rs.core.Response;

import edu.isi.wings.opmm.Catalog;
import edu.isi.wings.portal.classes.config.Publisher;
import edu.isi.wings.portal.classes.config.ServerDetails;
import edu.isi.wings.portal.classes.util.PlanningAPIBindings;
import edu.isi.wings.portal.classes.util.PlanningAndExecutingThread;
import edu.isi.wings.portal.classes.util.TemplateBindings;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import edu.isi.wings.opmm.WorkflowExecutionExport;
import edu.isi.wings.opmm.WorkflowTemplateExport;
import edu.isi.kcap.ontapi.KBTriple;
import edu.isi.wings.catalog.component.ComponentFactory;
import edu.isi.wings.catalog.data.DataFactory;
import edu.isi.wings.catalog.resource.ResourceFactory;
import edu.isi.wings.common.URIEntity;
import edu.isi.wings.common.UuidGen;
import edu.isi.wings.execution.engine.api.PlanExecutionEngine;
import edu.isi.wings.execution.engine.classes.RuntimeInfo;
import edu.isi.wings.execution.engine.classes.RuntimeInfo.Status;
import edu.isi.wings.execution.engine.classes.RuntimePlan;
import edu.isi.wings.execution.engine.classes.RuntimeStep;
import edu.isi.wings.execution.tools.api.ExecutionMonitorAPI;
import edu.isi.wings.planner.api.WorkflowGenerationAPI;
import edu.isi.wings.planner.api.impl.kb.WorkflowGenerationKB;
import edu.isi.wings.portal.classes.config.Config;
import edu.isi.wings.portal.classes.JsonHandler;
import edu.isi.wings.workflow.plan.api.ExecutionPlan;
import edu.isi.wings.workflow.plan.api.ExecutionStep;
import edu.isi.wings.workflow.plan.classes.ExecutionFile;
import edu.isi.wings.workflow.template.TemplateFactory;
import edu.isi.wings.workflow.template.api.Template;
import edu.isi.wings.workflow.template.api.TemplateCreationAPI;
import edu.isi.wings.workflow.template.classes.variables.Variable;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

/**
 * Controller for workflow runs: listing, inspecting, deleting, stopping,
 * (re-)executing, and publishing runs as OPMW provenance to a triple store.
 */
public class RunController {
  public Config config;
  public Gson json;
  public String dataUrl;
  public String templateUrl;

  private Properties props;

  // Single-threaded queue: planning/execution requests are processed sequentially.
  public static ExecutorService executor = Executors.newSingleThreadExecutor();
  // Cache of planning API bindings keyed by the executions-dir URL prefix.
  public static HashMap<String, PlanningAPIBindings> apiBindings =
      new HashMap<String, PlanningAPIBindings>();

  public RunController(Config config) {
    this.config = config;
    this.json = JsonHandler.createRunGson();
    this.props = config.getProperties();
    this.dataUrl = config.getUserDomainUrl() + "/data";
    this.templateUrl = config.getUserDomainUrl() + "/workflows";
  }

  public void end() {
  }

  /**
   * Get the run list json.
   * @param pattern optional, a pattern to filter
   * @param status optional, a pattern to filter complete runs
   * @param start optional, start offset (for paging) (set to -1 to ignore)
   * @param limit optional, number of runs to return (for paging) (set to -1 to ignore)
   * @return JSON object with "success", "results" (total count) and "rows"
   */
  public String getRunListJSON(String pattern, String status, int start, int limit) {
    HashMap<String, Object> result = new HashMap<String, Object>();
    int numberOfRuns = this.getNumberOfRuns(pattern, status, null);
    // For large result sets, fall back to the cheaper query variant.
    boolean fasterQuery = numberOfRuns > 1000;
    result.put("success", true);
    result.put("results", numberOfRuns);
    result.put("rows", this.getRunList(pattern, status, start, limit, fasterQuery));
    return json.toJson(result);
  }

  /**
   * Like {@link #getRunListJSON} but returns only id/template/runtime info per
   * row, optionally restricted to runs started after the given date.
   */
  public String getRunListSimpleJSON(String pattern, String status, int start, int limit,
      Date started_after) {
    HashMap<String, Object> result = new HashMap<String, Object>();
    result.put("success", true);
    result.put("results", this.getNumberOfRuns(pattern, status, started_after));
    result.put("rows", this.getRunListSimple(pattern, status, start, limit, started_after));
    return json.toJson(result);
  }

  /** Minimal per-run rows: runtime info, original template id, and run id. */
  public ArrayList<HashMap<String, Object>> getRunListSimple(String pattern, String status,
      int start, int limit, Date started_after) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    ArrayList<HashMap<String, Object>> list = new ArrayList<HashMap<String, Object>>();
    for (RuntimePlan exe : monitor.getRunListSimple(pattern, status, start, limit, started_after)) {
      HashMap<String, Object> map = new HashMap<String, Object>();
      map.put("runtimeInfo", exe.getRuntimeInfo());
      map.put("template_id", exe.getOriginalTemplateID());
      map.put("id", exe.getID());
      list.add(map);
    }
    return list;
  }

  /**
   * Per-run rows including job progress percentages (done/running/failed)
   * when the run's step queue is available.
   */
  public ArrayList<HashMap<String, Object>> getRunList(String pattern, String status, int start,
      int limit, boolean fasterQuery) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    ArrayList<HashMap<String, Object>> list = new ArrayList<HashMap<String, Object>>();
    for (RuntimePlan exe : monitor.getRunList(pattern, status, start, limit, fasterQuery)) {
      HashMap<String, Object> map = new HashMap<String, Object>();
      map.put("runtimeInfo", exe.getRuntimeInfo());
      map.put("template_id", exe.getOriginalTemplateID());
      map.put("id", exe.getID());
      if (exe.getQueue() != null) {
        int numtotal = exe.getQueue().getAllSteps().size();
        int numdone = exe.getQueue().getFinishedSteps().size();
        ArrayList<RuntimeStep> running_steps = exe.getQueue().getRunningSteps();
        ArrayList<RuntimeStep> failed_steps = exe.getQueue().getFailedSteps();
        map.put("running_jobs", this.getStepIds(running_steps));
        map.put("failed_jobs", this.getStepIds(failed_steps));
        if (numtotal > 0) {
          map.put("percent_done", numdone * 100.0 / numtotal);
          map.put("percent_running", running_steps.size() * 100.0 / numtotal);
          map.put("percent_failed", failed_steps.size() * 100.0 / numtotal);
        }
      }
      list.add(map);
    }
    return list;
  }

  public int getNumberOfRuns(String pattern, String status, Date started_after) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    return monitor.getNumberOfRuns(pattern, status, started_after);
  }

  /** Renders the step names of the given steps as a bracketed list string. */
  private String getStepIds(ArrayList<RuntimeStep> steps) {
    ArrayList<String> ids = new ArrayList<String>();
    for (RuntimeStep stepexe : steps) {
      ids.add(stepexe.getName());
    }
    return ids.toString();
  }

  /**
   * Full details for one run: execution state, expanded-template variables
   * (input/intermediate/output), constraints, and published URL if any.
   */
  public String getRunJSON(String runid) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    Map<String, Object> returnmap = new HashMap<String, Object>();
    RuntimePlan planexe = monitor.getRunDetails(runid);
    if (planexe != null && planexe.getPlan() != null) {
      // Refresh file metadata from disk for all step inputs and outputs.
      for (ExecutionStep step : planexe.getPlan().getAllExecutionSteps()) {
        for (ExecutionFile file : step.getInputFiles()) {
          file.loadMetadataFromLocation();
        }
        for (ExecutionFile file : step.getOutputFiles()) {
          file.loadMetadataFromLocation();
        }
      }
      TemplateCreationAPI tc = TemplateFactory.getCreationAPI(props);
      Template tpl = tc.getTemplate(planexe.getExpandedTemplateID());
      tc.end();
      Map<String, Object> variables = new HashMap<String, Object>();
      variables.put("input", tpl.getInputVariables());
      variables.put("intermediate", tpl.getIntermediateVariables());
      variables.put("output", tpl.getOutputVariables());
      returnmap.put("variables", variables);
      returnmap.put("constraints", this.getShortConstraints(tpl));
    }
    returnmap.put("execution", planexe);
    returnmap.put("published_url", this.getPublishedURL(runid));
    return json.toJson(returnmap);
  }

  public String getRunPlanJSON(String runid) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    RuntimePlan planexe = monitor.getRunDetails(runid);
    return json.toJson(planexe);
  }

  /**
   * Returns the already-published URL for the run, or null.
   * Currently always null unless a publisher lookup is implemented.
   */
  private String getPublishedURL(String runid) {
    Publisher publisher = config.getPublisher();
    if (publisher == null)
      return null;
    /* TODO: Return already published url for the run id if possible */
    return null;
  }

  /**
   * Collects non-parameter variable constraints from the template, dropping
   * hasDataBinding triples, keyed by variable id.
   */
  private Map<String, Object> getShortConstraints(Template tpl) {
    Map<String, Object> varbindings = new HashMap<String, Object>();
    for (Variable v : tpl.getVariables()) {
      List<Object> constraints = new ArrayList<Object>();
      if (v.isParameterVariable())
        continue;
      for (KBTriple t : tpl.getConstraintEngine().getConstraints(v.getID())) {
        Map<String, Object> cons = new HashMap<String, Object>();
        if (t.getPredicate().getName().equals("hasDataBinding"))
          continue;
        cons.put("p", t.getPredicate().getName());
        cons.put("o", t.getObject());
        constraints.add(cons);
      }
      varbindings.put(v.getID(), constraints);
    }
    return varbindings;
  }

  /**
   * Deletes all runs listed in the JSON array payload; delegates to
   * {@link #deleteRun} when given a single JSON object instead.
   */
  public String deleteRuns(String rjson, ServletContext context) {
    HashMap<String, Object> ret = new HashMap<String, Object>();
    ret.put("success", false);
    JsonElement listel = new JsonParser().parse(rjson);
    if (listel == null)
      return json.toJson(ret);
    if (listel.isJsonObject()) {
      return this.deleteRun(rjson, context);
    }
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    JsonArray list = listel.getAsJsonArray();
    for (int i = 0; i < list.size(); i++) {
      JsonElement el = list.get(i);
      String runid = el.getAsJsonObject().get("id").getAsString();
      monitor.deleteRun(runid);
    }
    ret.put("success", true);
    return json.toJson(ret);
  }

  /** Deletes the single run identified by the "id" field of the JSON payload. */
  public String deleteRun(String rjson, ServletContext context) {
    HashMap<String, Object> ret = new HashMap<String, Object>();
    ret.put("success", false);
    JsonElement el = new JsonParser().parse(rjson);
    if (el == null)
      return json.toJson(ret);
    String runid = el.getAsJsonObject().get("id").getAsString();
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    if (!monitor.deleteRun(runid))
      return json.toJson(ret);
    ret.put("success", true);
    return json.toJson(ret);
  }

  /**
   * Aborts a RUNNING plan via the engine stored in the servlet context.
   *
   * @return true if an abort was issued, false otherwise
   */
  public boolean stopRun(String runid, ServletContext context) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    RuntimePlan details = monitor.getRunDetails(runid);
    // Guard against unknown run ids (previously an unchecked NPE).
    if (details == null)
      return false;
    if (details.getRuntimeInfo().getStatus() == RuntimeInfo.Status.RUNNING) {
      PlanExecutionEngine engine = (PlanExecutionEngine) context.getAttribute("engine_" + runid);
      RuntimePlan rplan = (RuntimePlan) context.getAttribute("plan_" + runid);
      if (engine != null && rplan != null) {
        engine.abort(rplan);
        return true;
      }
    }
    return false;
  }

  /*
   * Utility function to expand and run the first expanded template
   * - Immediately returns a run id
   * - Puts the rest of the processing in a Queue to be processed sequentially
   */
  public String expandAndRunTemplate(TemplateBindings template_bindings, ServletContext context) {
    // Create a runid
    URIEntity tpluri = new URIEntity(template_bindings.getTemplateId());
    tpluri.setID(UuidGen.generateURIUuid(tpluri));
    String exPrefix = props.getProperty("domain.executions.dir.url");
    String runid = exPrefix + "/" + tpluri.getName() + ".owl#" + tpluri.getName();

    // Reuse (or create) the planning API bindings for this executions prefix.
    PlanningAPIBindings apis = null;
    if (apiBindings.containsKey(exPrefix)) {
      apis = apiBindings.get(exPrefix);
    } else {
      apis = new PlanningAPIBindings(props);
      apiBindings.put(exPrefix, apis);
    }

    // Submit the planning and execution thread
    executor.submit(new PlanningAndExecutingThread(runid, this.config, template_bindings, apis));

    // Return the runid
    return runid;
  }

  /** Runs an already-expanded template described by the given JSON payloads. */
  public String runExpandedTemplate(String origtplid, String templatejson, String consjson,
      String seedjson, String seedconsjson, ServletContext context) {
    Gson json = JsonHandler.createTemplateGson();
    Template xtpl = JsonHandler.getTemplateFromJSON(json, templatejson, consjson);
    xtpl.autoLayout();
    Template seedtpl = JsonHandler.getTemplateFromJSON(json, seedjson, seedconsjson);
    return createPlan(origtplid, context, xtpl, seedtpl);
  }

  /**
   * Creates and launches an execution plan for the expanded template.
   *
   * @return the new run id, or "" on failure
   */
  private String createPlan(String origtplid, ServletContext context, Template xtpl,
      Template seedtpl) {
    String requestid = UuidGen.generateAUuid("");
    WorkflowGenerationAPI wg = new WorkflowGenerationKB(props,
        DataFactory.getReasoningAPI(props), ComponentFactory.getReasoningAPI(props),
        ResourceFactory.getAPI(props), requestid);
    ExecutionPlan plan = wg.getExecutionPlan(xtpl);

    String seedid = UuidGen.generateURIUuid((URIEntity) seedtpl);
    if (plan != null) {
      // Save the expanded template, seeded template and plan
      if (!xtpl.save())
        return "";
      if (!seedtpl.saveAs(seedid))
        return "";
      if (plan.save()) {
        RuntimePlan rplan = new RuntimePlan(plan);
        rplan.setExpandedTemplateID(xtpl.getID());
        rplan.setOriginalTemplateID(origtplid);
        rplan.setSeededTemplateId(seedid);
        this.runExecutionPlan(rplan, context);
        return rplan.getID();
      }
    }
    return "";
  }

  /** Re-runs an existing plan from its saved expanded/seeded templates. */
  public Response reRunPlan(String run_id, ServletContext context) {
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    RuntimePlan plan = monitor.getRunDetails(run_id);
    TemplateCreationAPI tc = TemplateFactory.getCreationAPI(props);
    String orig_tp_id = plan.getOriginalTemplateID();
    Template xtpl = tc.getTemplate(plan.getExpandedTemplateID());
    Template seedtpl = tc.getTemplate(plan.getSeededTemplateID());
    tc.end();
    // Compare string content, not references (was: createPlan(...) == "").
    if (createPlan(orig_tp_id, context, xtpl, seedtpl).isEmpty())
      return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Internal error").build();
    return Response.status(Response.Status.CREATED).entity("CREATED").build();
  }

  // Run the Runtime Plan
  public void runExecutionPlan(RuntimePlan rplan, ServletContext context) {
    PlanExecutionEngine engine = config.getDomainExecutionEngine();
    // "execute" below is an asynchronous call
    engine.execute(rplan);

    // Save the engine for an abort if needed
    context.setAttribute("plan_" + rplan.getID(), rplan);
    context.setAttribute("engine_" + rplan.getID(), engine);
  }

  /**
   * Exports a successfully completed run as OPMW provenance and publishes the
   * domain catalog, execution, expanded template, and abstract template graphs
   * to the configured triple store.
   *
   * @param runid the run to publish
   * @return JSON with "url" of the published execution, or "error"
   */
  public String publishRun(String runid) {
    HashMap<String, String> retmap = new HashMap<String, String>();
    ExecutionMonitorAPI monitor = config.getDomainExecutionMonitor();
    RuntimePlan plan = monitor.getRunDetails(runid);
    if (plan.getRuntimeInfo().getStatus() != Status.SUCCESS) {
      retmap.put("error", "Can only publish successfully completed runs");
    } else
      try {
        Publisher publisher = config.getPublisher();

        ServerDetails publishUrl = publisher.getUploadServer();
        String tstoreurl = publisher.getTstorePublishUrl();
        String tstorequery = publisher.getTstoreQueryUrl();
        String exportName = publisher.getExportName();
        String uploadURL = publishUrl.getUrl();
        String uploadUsername = publishUrl.getUsername();
        String uploadPassword = publishUrl.getPassword();
        long uploadMaxSize = publishUrl.getMaxUploadSize();

        String rname = runid.substring(runid.indexOf('#') + 1);

        // Fetch expanded template (to get data binding ids)
        TemplateCreationAPI tc = TemplateFactory.getCreationAPI(props);
        Template xtpl = tc.getTemplate(plan.getExpandedTemplateID());
        tc.end();
        HashMap<String, String> varBindings = new HashMap<String, String>();
        for (Variable var : xtpl.getVariables()) {
          varBindings.put(var.getID(), var.getBinding().getID());
        }

        // Create a temporary directory to upload/move
        File _tmpdir = File.createTempFile("temp", "");
        File tempdir = new File(_tmpdir.getParent() + "/" + rname);
        FileUtils.deleteQuietly(tempdir);
        if (!_tmpdir.delete() || !tempdir.mkdirs())
          throw new Exception("Cannot create temp directory");

        File datadir = new File(tempdir.getAbsolutePath() + "/data");
        File codedir = new File(tempdir.getAbsolutePath() + "/code");
        datadir.mkdirs();
        codedir.mkdirs();

        FileUtils.deleteQuietly(tempdir);
        // Create the temporal directory to store data, components, workflow and execution
        tempdir.mkdirs();
        File dcontdir = new File(tempdir.getAbsolutePath() + "/ont/data");
        File acontdir = new File(tempdir.getAbsolutePath() + "/ont/components");
        File wflowdir = new File(tempdir.getAbsolutePath() + "/ont/workflows");
        File execsdir = new File(tempdir.getAbsolutePath() + "/ont/executions");
        File run_exportdir = new File(tempdir.getAbsolutePath() + "/export/run");
        File tpl_exportdir = new File(tempdir.getAbsolutePath() + "/export/template");
        dcontdir.mkdirs();
        acontdir.mkdirs();
        wflowdir.mkdirs();
        execsdir.mkdirs();
        run_exportdir.mkdirs();
        tpl_exportdir.mkdirs();

        // Merge both concrete and abstract component libraries from WINGS
        String aclib = props.getProperty("lib.concrete.url");
        String abslib = props.getProperty("lib.abstract.url");
        String aclibdata = IOUtils.toString(new URL(aclib));
        String abslibdata = IOUtils.toString(new URL(abslib));
        // Strip the XML prolog and rdf:RDF wrapper from the abstract library so
        // its body can be spliced into the concrete library's document.
        abslibdata = abslibdata.replaceFirst("<\\?xml.+?>", "");
        abslibdata = Pattern.compile("<rdf:RDF.+?>", Pattern.DOTALL).matcher(abslibdata)
            .replaceFirst("");
        abslibdata = abslibdata.replaceFirst("<\\/rdf:RDF>", "");
        aclibdata = aclibdata.replaceFirst("<\\/rdf:RDF>", "");
        String rplandata = IOUtils.toString(new URL(runid));

        // Write the merged library file and the run's plan file
        aclibdata += abslibdata + "</rdf:RDF>\n";
        File aclibfile = new File(acontdir.getAbsolutePath() + "/library.owl");
        File rplanfile = new File(execsdir.getAbsolutePath() + "/" + plan.getName() + ".owl");
        FileUtils.write(aclibfile, aclibdata);
        FileUtils.write(rplanfile, rplandata);

        // Original (abstract) workflow template file
        URL otplurl = new URL(plan.getOriginalTemplateID());
        File otplfile = new File(wflowdir.getAbsolutePath() + "/" + otplurl.getRef() + ".owl");
        String otpldata = IOUtils.toString(otplurl);
        FileUtils.write(otplfile, otpldata);

        Catalog catalog = new Catalog(config.getDomainId(), exportName,
            publisher.getDomainsDir(), aclibfile.getAbsolutePath());
        WorkflowExecutionExport exp = new WorkflowExecutionExport(
            rplanfile.getAbsolutePath(), otplfile.getAbsolutePath(), catalog, exportName,
            tstorequery, config.getDomainId());
        exp.setUploadURL(uploadURL);
        exp.setUploadUsername(uploadUsername);
        exp.setUploadPassword(uploadPassword);
        exp.setUploadMaxSize(uploadMaxSize);
        String serialization = "TURTLE";

        // Publish the catalog
        String domainPath = catalog.exportCatalog(null, serialization);
        File domainFile = new File(domainPath);
        this.publishFile(tstoreurl, catalog.getDomainGraphURI(), domainFile.getAbsolutePath());

        // Execution
        String executionFilePath = run_exportdir + File.separator + "execution";
        String graphUri = exp.exportAsOPMW(executionFilePath, serialization);
        if (!exp.isExecPublished()) {
          this.publishFile(tstoreurl, graphUri, executionFilePath);
          // Expanded template
          String expandedTemplateFilePath = run_exportdir + File.separator + "expandedTemplate";
          String expandedTemplateGraphUri = exp.getConcreteTemplateExport()
              .exportAsOPMW(expandedTemplateFilePath, serialization);
          if (!exp.getConcreteTemplateExport().isTemplatePublished())
            this.publishFile(tstoreurl, expandedTemplateGraphUri, expandedTemplateFilePath);
          // Abstract template
          WorkflowTemplateExport abstractTemplateExport = exp.getConcreteTemplateExport()
              .getAbstractTemplateExport();
          if (abstractTemplateExport != null) {
            String abstractFilePath = run_exportdir + File.separator + "abstract";
            String abstractGraphUri = abstractTemplateExport.exportAsOPMW(abstractFilePath,
                serialization);
            if (!abstractTemplateExport.isTemplatePublished())
              this.publishFile(tstoreurl, abstractGraphUri, abstractFilePath);
          }
        }
        retmap.put("url", exp.getTransformedExecutionURI());
      } catch (Exception e) {
        e.printStackTrace();
        retmap.put("error", e.getMessage());
      }
    return json.toJson(retmap);
  }

  /**
   * Upload triples to a rdf store
   * @param tstoreurl triple store url. e.g., http://ontosoft.isi.edu:3030/provenance/data
   * @param graphurl graph url.
   * @param filepath path of the local Turtle file to upload
   */
  private void publishFile(String tstoreurl, String graphurl, String filepath) {
    System.out.println("Publishing the filepath " + filepath + " on graph " + graphurl);
    // try-with-resources: the client was previously leaked on every call.
    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
      HttpPut putRequest = new HttpPut(tstoreurl + "?graph=" + graphurl);
      // TODO: move timeout to configuration
      int timeoutSeconds = 10;
      int CONNECTION_TIMEOUT_MS = timeoutSeconds * 1000;
      RequestConfig requestConfig = RequestConfig.custom()
          .setConnectionRequestTimeout(CONNECTION_TIMEOUT_MS)
          .setConnectTimeout(CONNECTION_TIMEOUT_MS)
          .setSocketTimeout(CONNECTION_TIMEOUT_MS)
          .build();
      putRequest.setConfig(requestConfig);
      File file = new File(filepath);
      // Read/send as UTF-8 explicitly rather than the platform default charset.
      String content = FileUtils.readFileToString(file, "UTF-8");
      if (content != null) {
        StringEntity input = new StringEntity(content, "UTF-8");
        input.setContentType("text/turtle");
        putRequest.setEntity(input);
        HttpResponse response = httpClient.execute(putRequest);
        int statusCode = response.getStatusLine().getStatusCode();
        if (statusCode > 299) {
          System.err.println("Unable to upload the domain " + statusCode);
          System.err.println(response.getStatusLine().getReasonPhrase());
        } else {
          System.err.println("Success uploading the domain " + statusCode);
          System.err.println(response.getStatusLine().getReasonPhrase());
        }
      } else {
        System.err.println("File content is null " + filepath);
      }
    } catch (IOException e) {
      e.printStackTrace();
    }
  }
}
package io.pkts.frame;

import io.pkts.buffer.Buffer;
import io.pkts.buffer.Buffers;
import io.pkts.protocol.Protocol;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteOrder;

/**
 * Represents the 24-byte libpcap global header (magic + 20-byte body),
 * including detection of byte order and nanosecond-timestamp variants.
 *
 * @author jonas@jonasborjesson.com
 */
public final class PcapGlobalHeader {

    /**
     * pcap_hdr_s struct is SIZE bytes long.
     */
    public static final int SIZE = 24;

    /**
     * See http://wiki.wireshark.org/Development/LibpcapFileFormat
     */
    public static final byte[] MAGIC_BIG_ENDIAN = { (byte) 0xa1, (byte) 0xb2, (byte) 0xc3, (byte) 0xd4 };

    /**
     * See http://wiki.wireshark.org/Development/LibpcapFileFormat
     */
    public static final byte[] MAGIC_LITTLE_ENDIAN = { (byte) 0xd4, (byte) 0xc3, (byte) 0xb2, (byte) 0xa1 };

    /**
     * New pcap format
     */
    public static final byte[] MAGIC_NGPCAP = { (byte) 0x0A, (byte) 0x0D, (byte) 0x0D, (byte) 0x0A };

    /**
     * Found the following at:
     * http://anonsvn.wireshark.org/wireshark/trunk/wiretap/libpcap.h
     *
     * PCAP_NSEC_MAGIC is for Ulf Lamping's modified "libpcap" format, which
     * uses the same common file format as PCAP_MAGIC, but the timestamps are
     * saved in nanosecond resolution instead of microseconds.
     */
    public static final byte[] MAGIC_NSEC = { (byte) 0xa1, (byte) 0xb2, (byte) 0x3c, (byte) 0x4d };
    public static final byte[] MAGIC_NSEC_SWAPPED = { (byte) 0x4d, (byte) 0x3c, (byte) 0xb2, (byte) 0xa1 };

    /**
     * Found the following at:
     * http://anonsvn.wireshark.org/wireshark/trunk/wiretap/libpcap.h
     *
     * PCAP_MODIFIED_MAGIC is for Alexey Kuznetsov's modified "libpcap" format,
     * as generated on Linux systems that have a "libpcap" with his patches, at
     * http://ftp.sunet.se/pub/os/Linux/ip-routing/lbl-tools/
     *
     * applied; PCAP_SWAPPED_MODIFIED_MAGIC is the byte-swapped version.
     */
    public static final byte[] MAGIC_MODIFIED = { (byte) 0xa1, (byte) 0xb2, (byte) 0xcd, (byte) 0x34 };
    public static final byte[] MAGIC_MODIFIED_SWAPPED = { (byte) 0x34, (byte) 0xcd, (byte) 0xb2, (byte) 0xa1 };

    private final ByteOrder byteOrder;

    // The 20-byte header body following the 4-byte magic.
    private final byte[] body;

    // True when timestamps are in nanoseconds (MAGIC_NSEC variants).
    private final boolean nsTimestamps;

    /**
     * Factory method for creating a default {@link PcapGlobalHeader}. Mainly
     * used for when writing out new pcaps to a stream.
     *
     * @return a little-endian header for Ethernet II frames
     */
    public static PcapGlobalHeader createDefaultHeader() {
        return createDefaultHeader(Protocol.ETHERNET_II);
    }

    /**
     * Creates a default little-endian header for the given protocol's link type.
     *
     * @throws IllegalArgumentException if the protocol has no known link type
     */
    public static PcapGlobalHeader createDefaultHeader(Protocol protocol) {
        Buffer body = Buffers.createBuffer(20);
        // major version number
        body.setUnsignedByte(0, (short) 2);
        // minor version number
        body.setUnsignedByte(2, (short) 4);
        // GMT to local correction - in practice always zero
        body.setUnsignedInt(4, 0);
        // accuracy of timestamp - always zero.
        body.setUnsignedInt(8, 0);
        // snaplength - typically 65535
        body.setUnsignedInt(12, 65535);

        // data link type - default is ethernet
        // See http://www.tcpdump.org/linktypes.html for a complete list
        if (protocol == null) {
            protocol = Protocol.ETHERNET_II;
        }
        Long linkType = protocol.getLinkType();
        if (linkType != null) {
            body.setUnsignedInt(16, linkType);
        } else {
            throw new IllegalArgumentException("Unknown protocol \"" + protocol
                    + "\". Not sure how to construct the global header. You probably need to add some code yourself");
        }
        return new PcapGlobalHeader(ByteOrder.LITTLE_ENDIAN, body.getRawArray());
    }

    public PcapGlobalHeader(final ByteOrder byteOrder, final byte[] body) {
        this(byteOrder, body, false);
    }

    public PcapGlobalHeader(final ByteOrder byteOrder, final byte[] body, final boolean nsTimestamps) {
        assert byteOrder != null;
        assert body != null && body.length == 20;
        this.byteOrder = byteOrder;
        this.body = body;
        this.nsTimestamps = nsTimestamps;
    }

    public ByteOrder getByteOrder() {
        return this.byteOrder;
    }

    public boolean timestampsInNs() {
        return this.nsTimestamps;
    }

    /**
     * Major version is currently 2
     *
     * @return
     */
    public int getMajorVersion() {
        return getUnsignedShort(0, this.body, this.byteOrder);
    }

    /**
     * Minor version is currently 4
     *
     * @return
     */
    public int getMinorVersion() {
        return getUnsignedShort(2, this.body, this.byteOrder);
    }

    /**
     * in theory, the accuracy of time stamps in the capture; in practice, all
     * tools set it to 0
     *
     * @return
     */
    public int getTimeAccuracy() {
        return getInt(8, this.body, this.byteOrder);
    }

    /**
     * The correction time in seconds between GMT (UTC) and the local timezone
     * of the following packet header timestamps. Examples: If the timestamps
     * are in GMT (UTC), thiszone is simply 0. If the timestamps are in Central
     * European time (Amsterdam, Berlin, ...) which is GMT + 1:00, thiszone must
     * be -3600. In practice, time stamps are always in GMT, so thiszone is
     * always 0.
     *
     * @return
     */
    public long getTimeZoneCorrection() {
        // thiszone is a SIGNED 32-bit field per the libpcap format, so read it
        // signed (an unsigned read would turn -3600 into a huge positive value).
        return getInt(4, this.body, this.byteOrder);
    }

    /**
     * the "snapshot length" for the capture (typically 65535 or even more, but
     * might be limited by the user)
     *
     * @return
     */
    public long getSnapLength() {
        return getUnsignedInt(12, this.body, this.byteOrder);
    }

    public int getDataLinkType() {
        return getInt(16, this.body, this.byteOrder);
    }

    public static final int getUnsignedShort(final int offset, final byte[] buffer, final ByteOrder byteOrder) {
        if (byteOrder == ByteOrder.BIG_ENDIAN) {
            return (buffer[offset + 0] & 0xff) << 8 | buffer[offset + 1] & 0xff;
        }
        return (buffer[offset + 1] & 0xff) << 8 | buffer[offset + 0] & 0xff;
    }

    /**
     * Reads a 32-bit unsigned value. The masks are {@code 0xffL} (long) so the
     * shifts happen in 64-bit arithmetic; with plain int masks a top byte of
     * 0x80 or more sign-extended into a negative long (e.g. a snaplen of
     * 0xFFFFFFFF came back as -1 instead of 4294967295).
     */
    public static final long getUnsignedInt(final int offset, final byte[] buffer, final ByteOrder byteOrder) {
        if (byteOrder == ByteOrder.BIG_ENDIAN) {
            return (buffer[offset + 0] & 0xffL) << 24 | (buffer[offset + 1] & 0xffL) << 16
                    | (buffer[offset + 2] & 0xffL) << 8 | buffer[offset + 3] & 0xffL;
        }
        return (buffer[offset + 3] & 0xffL) << 24 | (buffer[offset + 2] & 0xffL) << 16
                | (buffer[offset + 1] & 0xffL) << 8 | buffer[offset + 0] & 0xffL;
    }

    public static final int getInt(final int offset, final byte[] buffer, final ByteOrder byteOrder) {
        return (int) getUnsignedInt(offset, buffer, byteOrder);
    }

    /**
     * Parses the global header from the buffer, detecting byte order and the
     * nanosecond-timestamp magic variants.
     *
     * @throws IllegalArgumentException on an unrecognized magic number
     */
    public static final PcapGlobalHeader parse(final Buffer in) throws IOException {
        final Buffer h = in.readBytes(4);
        final byte[] header = h.getArray();
        ByteOrder byteOrder = null;
        boolean nsTimestamps = false;
        if (header[0] == MAGIC_BIG_ENDIAN[0] && header[1] == MAGIC_BIG_ENDIAN[1]
                && header[2] == MAGIC_BIG_ENDIAN[2] && header[3] == MAGIC_BIG_ENDIAN[3]) {
            byteOrder = ByteOrder.BIG_ENDIAN;
        } else if (header[0] == MAGIC_LITTLE_ENDIAN[0] && header[1] == MAGIC_LITTLE_ENDIAN[1]
                && header[2] == MAGIC_LITTLE_ENDIAN[2] && header[3] == MAGIC_LITTLE_ENDIAN[3]) {
            byteOrder = ByteOrder.LITTLE_ENDIAN;
        } else if (header[0] == MAGIC_NSEC[0] && header[1] == MAGIC_NSEC[1]
                && header[2] == MAGIC_NSEC[2] && header[3] == MAGIC_NSEC[3]) {
            byteOrder = ByteOrder.BIG_ENDIAN;
            nsTimestamps = true;
        } else if (header[0] == MAGIC_NSEC_SWAPPED[0] && header[1] == MAGIC_NSEC_SWAPPED[1]
                && header[2] == MAGIC_NSEC_SWAPPED[2] && header[3] == MAGIC_NSEC_SWAPPED[3]) {
            byteOrder = ByteOrder.LITTLE_ENDIAN;
            nsTimestamps = true;
        } else {
            throw new IllegalArgumentException("Unknown header type");
        }
        final byte[] body = in.readBytes(20).getArray();
        return new PcapGlobalHeader(byteOrder, body, nsTimestamps);
    }

    /**
     * Will write this header to the output stream.
     *
     * @param out
     */
    public void write(final OutputStream out) throws IOException {
        if (this.nsTimestamps) {
            if (this.byteOrder == ByteOrder.BIG_ENDIAN) {
                out.write(MAGIC_NSEC);
            } else {
                out.write(MAGIC_NSEC_SWAPPED);
            }
        } else {
            if (this.byteOrder == ByteOrder.BIG_ENDIAN) {
                out.write(MAGIC_BIG_ENDIAN);
            } else {
                out.write(MAGIC_LITTLE_ENDIAN);
            }
        }
        out.write(this.body);
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder();
        sb.append("Version: ").append(getMajorVersion()).append(".").append(getMinorVersion()).append("\n")
                .append("TimeZone: ").append(getTimeZoneCorrection()).append("\n")
                .append("Accuracy: ").append(getTimeAccuracy()).append("\n")
                .append("SnapLength: ").append(getSnapLength()).append("\n")
                .append("Network: ").append(getDataLinkType()).append("\n");
        return sb.toString();
    }

    /**
     * Combines four bytes (big-endian order) into an unsigned 32-bit value.
     * Same 0xffL fix as {@link #getUnsignedInt}: int-width shifts sign-extended
     * results with the top bit set into negative longs.
     */
    public static long unsignedInt(final byte a, final byte b, final byte c, final byte d) {
        return (a & 0xffL) << 24 | (b & 0xffL) << 16 | (c & 0xffL) << 8 | d & 0xffL;
    }
}
/******************************************************************
 * File:        ValueArray.java
 * Created by:  Dave Reynolds
 * Created on:  29 Nov 2013
 *
 * (c) Copyright 2013, Epimorphics Limited
 *
 *****************************************************************/

package com.epimorphics.dclib.values;

import com.epimorphics.dclib.framework.ConverterProcess;
import com.epimorphics.util.NameUtils;

import java.util.ArrayList;

import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.TypeMapper;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.vocabulary.XSD;

/**
 * Wraps an array of strings, e.g. from a split operation. This allows
 * a pattern to return multiple results. Most Value operations are applied
 * element-wise via {@link #applyFunction(MapValue)}.
 *
 * @author <a href="mailto:dave@epimorphics.com">Dave Reynolds</a>
 */
public class ValueArray extends ValueBase<Value[]> implements Value {

    public ValueArray(Value[] values) {
        super(values);
    }

    public ValueArray(String[] values) {
        super(wrapStrings(values));
    }

    /** Wraps each raw string in a ValueString so the array is uniformly Value[]. */
    private static Value[] wrapStrings(String[] values) {
        Value[] wrapped = new Value[ values.length ];
        for (int i = 0; i < values.length; i++) {
            wrapped[i] = new ValueString(values[i]);
        }
        return wrapped;
    }

    @Override
    public boolean isNull() {
        return value == null || value.length == 0;
    }

    @Override
    public boolean isMulti() {
        return true;
    }

    @Override
    public Value[] getValues() {
        return value;
    }

    /**
     * Appends a value to every element. Appending another multi-value produces
     * the cross product (length = this.length * app.length); appending a
     * single value concatenates its string form onto each element.
     */
    @Override
    public Value append(Value app) {
        if (app.isMulti()) {
            Value[] apps = app.getValues();
            int len = apps.length;
            Value[] results = new Value[value.length * len];
            for (int i = 0; i < value.length; i++) {
                for (int j = 0; j < len; j++) {
                    results[i*len + j] = value[i].append( apps[j] );
                }
            }
            return new ValueArray(results);
        } else {
            String[] results = new String[value.length];
            for (int i = 0; i < value.length; i++) {
                results[i] = value[i] + app.toString();
            }
            return new ValueArray(results);
        }
    }

    @Override
    public Value asString() {
        return this;
    }

    // A multi-value has no single node representation
    @Override
    public Node asNode() {
        return null;
    }

    // A multi-value has no single datatype
    @Override
    public String getDatatype() {
        return null;
    }

    public Value get(int i) {
        return value[i];
    }

    // Value methods applicable to any type, applied element-wise

    public Object datatype(final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return new ValueNode( NodeFactory.createLiteral(value.toString(), typeFor(typeURI)) );
            }
        });
    }

    protected RDFDatatype typeFor(String typeURI) {
        return TypeMapper.getInstance().getSafeTypeByName( expandTypeURI(typeURI) );
    }

    /** Expands a (possibly prefixed) datatype URI; xsd: is hardwired as a fallback. */
    protected String expandTypeURI(String typeURI) {
        typeURI = ConverterProcess.getGlobalDataContext().expandURI(typeURI);
        if (typeURI.startsWith("xsd:")) {
            // Hardwired xsd: even if the prefix mapping doesn't have it
            typeURI = typeURI.replace("xsd:", XSD.getURI());
        }
        return typeURI;
    }

    public Object format(final String fmtstr) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return new ValueString(String.format(fmtstr, value));
            }
        });
    }

    public boolean isString() {
        return false;
    }

    public boolean isNumber() {
        return false;
    }

    public boolean isDate() {
        return false;
    }

    public Value asNumber() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                ValueNumber v = new ValueNumber(value.toString());
                if (v.isNull()) {
                    reportError("Could not convert " + value + " to a number");
                }
                return v;
            }
        });
    }

    @Override
    public Value asRDFNode() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                @SuppressWarnings("rawtypes")
                Value v = ((ValueBase) value).asRDFNode() ;
                return v;
            }
        });
    }

    public Value asResource() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return GlobalFunctions.asResource((Object) value);
            }
        });
    }

    @Override
    public Value map(final String mapsource, final boolean matchRequired) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).map(mapsource, matchRequired);
            }
        });
    }

    public Value map(final String mapsource) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).map(mapsource);
            }
        });
    }

    public Value map(final String[] mapsources, final Object deflt) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).map(mapsources, deflt);
            }
        });
    }

    public Value asDate(final String format, final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return ValueDate.parse(value.toString(), format, expandTypeURI(typeURI));
            }
        });
    }

    public Value asDate(final String typeURI) {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return ValueDate.parse(value.toString(), expandTypeURI(typeURI));
            }
        });
    }

    public Value referenceTime() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                if(value instanceof ValueDate ) {
                    return ((ValueDate)value).referenceTime();
                }
                reportError("Could not generate reference time for " + value + " not a ValueDate");
                return new ValueNull();
            }
        });
    }

    public Value toLowerCase() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return wrap(value.toString().toLowerCase());
            }
        });
    }

    public Value toUpperCase() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                return wrap(value.toString().toUpperCase());
            }
        });
    }

    public Value toSegment() {
        return applyFunction(new MapValue() {
            public Value map(Value value) {
                // FIX: was NameUtils.safeName(toString()), where the bare toString()
                // bound to the anonymous MapValue instance (Object's default
                // "ClassName@hash"), not the element being mapped. Now uses
                // value.toString(), mirroring toLowerCase()/toUpperCase().
                return wrap( NameUtils.safeName(value.toString()) );
            }
        });
    }

    public Value toCleanSegment() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).toCleanSegment();
            }
        });
    }

    public Value toSegment(final String repl) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).toSegment(repl);
            }
        });
    }

    public Value trim() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).trim();
            }
        });
    }

    public Value substring(final int offset) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).substring(offset);
            }
        });
    }

    public Value substring(final int start, final int end) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).substring(start, end);
            }
        });
    }

    public Value replaceAll(final String regex, final String replacement) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).replaceAll(regex, replacement);
            }
        });
    }

    public Value regex(final String regex) {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).regex(regex);
            }
        });
    }

    public Value lastSegment() {
        return applyFunction(new MapValue() {
            @SuppressWarnings("rawtypes")
            public Value map(Value value) {
                return ((ValueBase)value).lastSegment();
            }
        });
    }

    /** Element-wise transformation applied by {@link #applyFunction(MapValue)}. */
    public interface MapValue {
        public Value map(Value value);
    }

    /** Applies the given transformation to every element, returning a new array. */
    public ValueArray applyFunction(MapValue map) {
        Value[] result = new Value[ value.length ];
        for (int i = 0; i < value.length; i++) {
            result[i] = map.map( value[i]);
        }
        return new ValueArray(result);
    }

    public String toString() {
        StringBuilder sb = new StringBuilder() ;
        if (value == null) return null;
        boolean first = true;
        sb.append('[');
        for(Value v : value) {
            sb.append(first ? "" : " | ");
            sb.append(v.toString()) ;
            first = false;
        }
        sb.append(']');
        return sb.toString();
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.storage.file.share;

import com.azure.core.http.HttpHeaders;
import com.azure.storage.file.share.models.NtfsFileAttributes;

import java.time.OffsetDateTime;
import java.time.format.DateTimeFormatter;
import java.util.EnumSet;

/**
 * A class used to represent the SMB properties of a file.
 * <p>
 * The change time, file id and parent id are assigned by the service and are
 * therefore only populated when this object is built from response headers.
 */
public class FileSmbProperties {

    private String filePermissionKey;
    private EnumSet<NtfsFileAttributes> ntfsFileAttributes;
    private OffsetDateTime fileCreationTime;
    private OffsetDateTime fileLastWriteTime;
    private final OffsetDateTime fileChangeTime;
    private final String fileId;
    private final String parentId;

    /**
     * Default constructor. Service-assigned fields stay unset.
     */
    public FileSmbProperties() {
        // Non user-settable properties
        this.fileChangeTime = null;
        this.fileId = null;
        this.parentId = null;
    }

    /**
     * @return The file's permission key.
     */
    public String getFilePermissionKey() {
        return filePermissionKey;
    }

    /**
     * @return The file's {@link NtfsFileAttributes}.
     */
    public EnumSet<NtfsFileAttributes> getNtfsFileAttributes() {
        return ntfsFileAttributes;
    }

    /**
     * @return The file's creation time.
     */
    public OffsetDateTime getFileCreationTime() {
        return fileCreationTime;
    }

    /**
     * @return The file's last write time.
     */
    public OffsetDateTime getFileLastWriteTime() {
        return fileLastWriteTime;
    }

    /**
     * @return The file's change time.
     */
    public OffsetDateTime getFileChangeTime() {
        return fileChangeTime;
    }

    /**
     * @return The file's ID.
     */
    public String getFileId() {
        return fileId;
    }

    /**
     * @return The file's parent ID.
     */
    public String getParentId() {
        return parentId;
    }

    /**
     * Sets the file permission key.
     *
     * @param filePermissionKey The file permission key.
     * @return the updated FileSmbProperties object.
     */
    public FileSmbProperties setFilePermissionKey(String filePermissionKey) {
        this.filePermissionKey = filePermissionKey;
        return this;
    }

    /**
     * Sets the ntfs file attributes.
     *
     * @param ntfsFileAttributes An enum set of the ntfs file attributes.
     * @return the updated FileSmbProperties object.
     */
    public FileSmbProperties setNtfsFileAttributes(EnumSet<NtfsFileAttributes> ntfsFileAttributes) {
        this.ntfsFileAttributes = ntfsFileAttributes;
        return this;
    }

    /**
     * Sets the file creation time.
     *
     * @param fileCreationTime The file creation time.
     * @return the updated FileSmbProperties object.
     */
    public FileSmbProperties setFileCreationTime(OffsetDateTime fileCreationTime) {
        this.fileCreationTime = fileCreationTime;
        return this;
    }

    /**
     * Sets the file last write time.
     *
     * @param fileLastWriteTime The file last write time.
     * @return the updated FileSmbProperties object.
     */
    public FileSmbProperties setFileLastWriteTime(OffsetDateTime fileLastWriteTime) {
        this.fileLastWriteTime = fileLastWriteTime;
        return this;
    }

    // HELPER METHODS

    /**
     * Determines the value of the file permission header.
     * <p>
     * The default is only used when neither an explicit permission nor a
     * permission key has been supplied.
     *
     * @param filePermission The file permission.
     * @param defaultValue The default file permission header value.
     * @return The value of the file permission header
     */
    String setFilePermission(String filePermission, String defaultValue) {
        if (filePermission == null && filePermissionKey == null) {
            return defaultValue;
        }
        return filePermission;
    }

    /**
     * Determines the value of the ntfs attributes header.
     *
     * @param defaultValue The default ntfs attributes header value.
     * @return The value of the ntfs attributes header
     */
    String setNtfsFileAttributes(String defaultValue) {
        if (ntfsFileAttributes == null) {
            return defaultValue;
        }
        return NtfsFileAttributes.toString(ntfsFileAttributes);
    }

    /**
     * Determines the value of the creation time header.
     *
     * @param defaultValue The default creation time header value.
     * @return The value of the creation time header
     */
    String setFileCreationTime(String defaultValue) {
        if (fileCreationTime == null) {
            return defaultValue;
        }
        return parseFileSMBDate(fileCreationTime);
    }

    /**
     * Determines the value of the last write time header.
     *
     * @param defaultValue The default last write time header value.
     * @return The value of the last write time header
     */
    String setFileLastWriteTime(String defaultValue) {
        if (fileLastWriteTime == null) {
            return defaultValue;
        }
        return parseFileSMBDate(fileLastWriteTime);
    }

    /**
     * Given an <code>OffsetDateTime</code>, generates a {@code String} representing a date in the format needed for
     * file SMB properties
     *
     * @param time the <code>OffsetDateTime</code> to be interpreted as a {@code String}
     * @return The {@code String} representing the date
     */
    private static String parseFileSMBDate(OffsetDateTime time) {
        return time.format(DateTimeFormatter.ofPattern(FileConstants.SMB_DATE_STRING));
    }

    /**
     * Creates a new FileSmbProperties object from HttpHeaders
     *
     * @param httpHeaders The headers to construct FileSmbProperties from
     */
    FileSmbProperties(HttpHeaders httpHeaders) {
        this.filePermissionKey = httpHeaders.getValue(FileConstants.HeaderConstants.FILE_PERMISSION_KEY);
        this.ntfsFileAttributes =
            NtfsFileAttributes.toAttributes(httpHeaders.getValue(FileConstants.HeaderConstants.FILE_ATTRIBUTES));
        this.fileCreationTime =
            OffsetDateTime.parse(httpHeaders.getValue(FileConstants.HeaderConstants.FILE_CREATION_TIME));
        this.fileLastWriteTime =
            OffsetDateTime.parse(httpHeaders.getValue(FileConstants.HeaderConstants.FILE_LAST_WRITE_TIME));
        this.fileChangeTime =
            OffsetDateTime.parse(httpHeaders.getValue(FileConstants.HeaderConstants.FILE_CHANGE_TIME));
        this.fileId = httpHeaders.getValue(FileConstants.HeaderConstants.FILE_ID);
        this.parentId = httpHeaders.getValue(FileConstants.HeaderConstants.FILE_PARENT_ID);
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.common.logging;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.Build;
import org.elasticsearch.Version;
import org.elasticsearch.common.SuppressLoggerChecks;
import org.elasticsearch.common.util.concurrent.ThreadContext;

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
import java.time.format.SignStyle;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.HOUR_OF_DAY;
import static java.time.temporal.ChronoField.MINUTE_OF_HOUR;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.SECOND_OF_MINUTE;
import static java.time.temporal.ChronoField.YEAR;

/**
 * A logger that logs deprecation notices.
 * <p>
 * Each deprecation is both written to a dedicated "deprecation." log and attached to the HTTP response of every
 * registered {@link ThreadContext} as an RFC 7234 "Warning" header, so clients can discover deprecated usage.
 */
public class DeprecationLogger {

    // the underlying log4j logger, named "deprecation.<parent>" (see constructor)
    private final Logger logger;

    /**
     * This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel.
     * <p>
     * Integration tests will create separate nodes within the same classloader, thus leading to a shared, {@code static} state.
     * In order for all tests to appropriately be handled, this must be able to remember <em>all</em> {@link ThreadContext}s that it is
     * given in a thread safe manner.
     * <p>
     * For actual usage, multiple nodes do not share the same JVM and therefore this will only be set once in practice.
     */
    private static final CopyOnWriteArraySet<ThreadContext> THREAD_CONTEXT = new CopyOnWriteArraySet<>();

    /**
     * Set the {@link ThreadContext} used to add deprecation headers to network responses.
     * <p>
     * This is expected to <em>only</em> be invoked by the {@code Node}'s constructor (therefore once outside of tests).
     *
     * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
     * @throws IllegalStateException if this {@code threadContext} has already been set
     */
    public static void setThreadContext(ThreadContext threadContext) {
        Objects.requireNonNull(threadContext, "Cannot register a null ThreadContext");

        // add returning false means it _did_ have it already
        if (THREAD_CONTEXT.add(threadContext) == false) {
            throw new IllegalStateException("Double-setting ThreadContext not allowed!");
        }
    }

    /**
     * Remove the {@link ThreadContext} used to add deprecation headers to network responses.
     * <p>
     * This is expected to <em>only</em> be invoked by the {@code Node}'s {@code close} method (therefore once outside of tests).
     *
     * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node})
     * @throws IllegalStateException if this {@code threadContext} is unknown (and presumably already unset before)
     */
    public static void removeThreadContext(ThreadContext threadContext) {
        assert threadContext != null;

        // remove returning false means it did not have it already
        if (THREAD_CONTEXT.remove(threadContext) == false) {
            throw new IllegalStateException("Removing unknown ThreadContext not allowed!");
        }
    }

    /**
     * Creates a new deprecation logger based on the parent logger. Automatically
     * prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.",
     * it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain
     * the "org.elasticsearch" namespace.
     */
    public DeprecationLogger(Logger parentLogger) {
        String name = parentLogger.getName();
        if (name.startsWith("org.elasticsearch")) {
            name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation.");
        } else {
            name = "deprecation." + name;
        }
        this.logger = LogManager.getLogger(name);
    }

    /**
     * Logs a deprecated message.
     */
    public void deprecated(String msg, Object... params) {
        deprecated(THREAD_CONTEXT, msg, params);
    }

    /*
     * RFC7234 specifies the warning format as warn-code <space> warn-agent <space> "warn-text" [<space> "warn-date"]. Here, warn-code is a
     * three-digit number with various standard warn codes specified. The warn code 299 is apt for our purposes as it represents a
     * miscellaneous persistent warning (can be presented to a human, or logged, and must not be removed by a cache). The warn-agent is an
     * arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional
     * quoted field that can be in a variety of specified date formats; here we use RFC 1123 format.
     */
    // format-string template: the version/hash part is baked in once; the two trailing %s slots
    // are filled per-warning with the escaped message and the RFC 7231 date (see formatWarning)
    private static final String WARNING_FORMAT =
            String.format(
                    Locale.ROOT,
                    "299 Elasticsearch-%s%s-%s ",
                    Version.CURRENT.toString(),
                    Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "",
                    Build.CURRENT.shortHash()) +
                    "\"%s\" \"%s\"";

    /*
     * RFC 7234 section 5.5 specifies that the warn-date is a quoted HTTP-date. HTTP-date is defined in RFC 7234 Appendix B as being from
     * RFC 7231 section 7.1.1.1. RFC 7231 specifies an HTTP-date as an IMF-fixdate (or an obs-date referring to obsolete formats). The
     * grammar for IMF-fixdate is specified as 'day-name "," SP date1 SP time-of-day SP GMT'. Here, day-name is
     * (Mon|Tue|Wed|Thu|Fri|Sat|Sun). Then, date1 is 'day SP month SP year' where day is 2DIGIT, month is
     * (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec), and year is 4DIGIT. Lastly, time-of-day is 'hour ":" minute ":" second' where
     * hour is 2DIGIT, minute is 2DIGIT, and second is 2DIGIT. Finally, 2DIGIT and 4DIGIT have the obvious definitions.
     */
    private static final DateTimeFormatter RFC_7231_DATE_TIME;

    static {
        // explicit English day/month names so the header does not vary with the JVM's default locale
        final Map<Long, String> dow = new HashMap<>();
        dow.put(1L, "Mon");
        dow.put(2L, "Tue");
        dow.put(3L, "Wed");
        dow.put(4L, "Thu");
        dow.put(5L, "Fri");
        dow.put(6L, "Sat");
        dow.put(7L, "Sun");
        final Map<Long, String> moy = new HashMap<>();
        moy.put(1L, "Jan");
        moy.put(2L, "Feb");
        moy.put(3L, "Mar");
        moy.put(4L, "Apr");
        moy.put(5L, "May");
        moy.put(6L, "Jun");
        moy.put(7L, "Jul");
        moy.put(8L, "Aug");
        moy.put(9L, "Sep");
        moy.put(10L, "Oct");
        moy.put(11L, "Nov");
        moy.put(12L, "Dec");
        RFC_7231_DATE_TIME = new DateTimeFormatterBuilder()
                .parseCaseInsensitive()
                .parseLenient()
                .optionalStart()
                .appendText(DAY_OF_WEEK, dow)
                .appendLiteral(", ")
                .optionalEnd()
                .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE)
                .appendLiteral(' ')
                .appendText(MONTH_OF_YEAR, moy)
                .appendLiteral(' ')
                .appendValue(YEAR, 4)
                .appendLiteral(' ')
                .appendValue(HOUR_OF_DAY, 2)
                .appendLiteral(':')
                .appendValue(MINUTE_OF_HOUR, 2)
                .optionalStart()
                .appendLiteral(':')
                .appendValue(SECOND_OF_MINUTE, 2)
                .optionalEnd()
                .appendLiteral(' ')
                .appendOffset("+HHMM", "GMT")
                .toFormatter(Locale.getDefault(Locale.Category.FORMAT));
    }

    private static final ZoneId GMT = ZoneId.of("GMT");

    /**
     * Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code
     * is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash.
     */
    public static Pattern WARNING_HEADER_PATTERN = Pattern.compile(
            "299 " + // warn code
                "Elasticsearch-\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-(?:[a-f0-9]{7}|Unknown) " + // warn agent
                "\"((?:\t| |!|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x80-\\xff]|\\\\|\\\\\")*)\" " + // quoted warning value, captured
                // quoted RFC 1123 date format
                "\"" + // opening quote
                "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday
                "\\d{2} " + // 2-digit day
                "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month
                "\\d{4} " + // 4-digit year
                "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
                "GMT" + // GMT
                "\""); // closing quote

    /**
     * Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string
     * {@code 299 Elasticsearch-6.0.0 "warning value" "Sat, 25 Feb 2017 10:27:43 GMT"}, the return value of this method would be {@code
     * warning value}.
     *
     * @param s the value of a warning header formatted according to RFC 7234.
     * @return the extracted warning value
     */
    public static String extractWarningValueFromWarningHeader(final String s) {
        /*
         * We know the exact format of the warning header, so to extract the warning value we can skip forward from the front to the first
         * quote, and skip backwards from the end to the penultimate quote:
         *
         *   299 Elasticsearch-6.0.0 "warning value" "Sat, 25 Feb 2017 10:27:43 GMT"
         *                           ^             ^ ^
         *                           firstQuote    penultimateQuote lastQuote
         *
         * We do it this way rather than seeking forward after the first quote because there could be escaped quotes in the warning value
         * but since there are none in the warning date, we can skip backwards to find the quote that closes the quoted warning value.
         *
         * We parse this manually rather than using the capturing regular expression because the regular expression involves a lot of
         * backtracking and carries a performance penalty. However, when assertions are enabled, we still use the regular expression to
         * verify that we are maintaining the warning header format.
         */
        final int firstQuote = s.indexOf('\"');
        final int lastQuote = s.lastIndexOf('\"');
        final int penultimateQuote = s.lastIndexOf('\"', lastQuote - 1);
        // penultimateQuote is the date's opening quote; the quote closing the warning value
        // sits two characters earlier (closing quote, space, opening quote), and the end
        // index of substring is exclusive, so this stops just before that closing quote
        final String warningValue = s.substring(firstQuote + 1, penultimateQuote - 2);
        assert assertWarningValue(s, warningValue);
        return warningValue;
    }

    /**
     * Assert that the specified string has the warning value equal to the provided warning value.
     *
     * @param s            the string representing a full warning header
     * @param warningValue the expected warning header
     * @return {@code true} if the specified string has the expected warning value
     */
    private static boolean assertWarningValue(final String s, final String warningValue) {
        final Matcher matcher = WARNING_HEADER_PATTERN.matcher(s);
        final boolean matches = matcher.matches();
        assert matches;
        return matcher.group(1).equals(warningValue);
    }

    /**
     * Logs a deprecated message to the deprecation log, as well as to the local {@link ThreadContext}.
     *
     * @param threadContexts The node's {@link ThreadContext} (outside of concurrent tests, this should only ever have one context).
     * @param message The deprecation message.
     * @param params The parameters used to fill in the message, if any exist.
     */
    @SuppressLoggerChecks(reason = "safely delegates to logger")
    void deprecated(final Set<ThreadContext> threadContexts, final String message, final Object... params) {
        final Iterator<ThreadContext> iterator = threadContexts.iterator();

        if (iterator.hasNext()) {
            // at least one context is registered: build the header once, then attach it to every context
            final String formattedMessage = LoggerMessageFormat.format(message, params);
            final String warningHeaderValue = formatWarning(formattedMessage);
            assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches();
            assert extractWarningValueFromWarningHeader(warningHeaderValue).equals(escape(formattedMessage));
            while (iterator.hasNext()) {
                try {
                    final ThreadContext next = iterator.next();
                    next.addResponseHeader("Warning", warningHeaderValue, DeprecationLogger::extractWarningValueFromWarningHeader);
                } catch (final IllegalStateException e) {
                    // ignored; it should be removed shortly
                }
            }
            logger.warn(formattedMessage);
        } else {
            logger.warn(message, params);
        }
    }

    /**
     * Format a warning string in the proper warning format by prepending a warn code, warn agent, wrapping the warning string in quotes,
     * and appending the RFC 7231 date.
     *
     * @param s the warning string to format
     * @return a warning value formatted according to RFC 7234
     */
    public static String formatWarning(final String s) {
        return String.format(Locale.ROOT, WARNING_FORMAT, escape(s), RFC_7231_DATE_TIME.format(ZonedDateTime.now(GMT)));
    }

    /**
     * Escape backslashes and quotes in the specified string.
     *
     * @param s the string to escape
     * @return the escaped string
     */
    public static String escape(String s) {
        return s.replaceAll("([\"\\\\])", "\\\\$1");
    }

}
/* * Copyright 2016 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.stunner.cm.client.canvas; import javax.enterprise.context.Dependent; import javax.enterprise.event.Event; import javax.inject.Inject; import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresCanvas; import org.kie.workbench.common.stunner.cm.client.shape.NullShape; import org.kie.workbench.common.stunner.cm.qualifiers.CaseManagementEditor; import org.kie.workbench.common.stunner.core.client.api.ClientDefinitionManager; import org.kie.workbench.common.stunner.core.client.api.ShapeManager; import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler; import org.kie.workbench.common.stunner.core.client.canvas.CanvasHandlerImpl; import org.kie.workbench.common.stunner.core.client.canvas.controls.actions.TextPropertyProviderFactory; import org.kie.workbench.common.stunner.core.client.canvas.event.registration.CanvasElementAddedEvent; import org.kie.workbench.common.stunner.core.client.canvas.event.registration.CanvasElementRemovedEvent; import org.kie.workbench.common.stunner.core.client.canvas.event.registration.CanvasElementUpdatedEvent; import org.kie.workbench.common.stunner.core.client.canvas.event.registration.CanvasElementsClearEvent; import org.kie.workbench.common.stunner.core.client.command.CanvasCommandFactory; import org.kie.workbench.common.stunner.core.client.service.ClientFactoryService; import 
org.kie.workbench.common.stunner.core.client.shape.MutationContext; import org.kie.workbench.common.stunner.core.client.shape.Shape; import org.kie.workbench.common.stunner.core.diagram.Diagram; import org.kie.workbench.common.stunner.core.graph.Edge; import org.kie.workbench.common.stunner.core.graph.Element; import org.kie.workbench.common.stunner.core.graph.Node; import org.kie.workbench.common.stunner.core.graph.content.view.View; import org.kie.workbench.common.stunner.core.graph.processing.index.GraphIndexBuilder; import org.kie.workbench.common.stunner.core.graph.processing.index.MutableIndex; import org.kie.workbench.common.stunner.core.graph.util.GraphUtils; import org.kie.workbench.common.stunner.core.rule.RuleManager; @Dependent @CaseManagementEditor public class CaseManagementCanvasHandler<D extends Diagram, C extends WiresCanvas> extends CanvasHandlerImpl<D, C> { @Inject public CaseManagementCanvasHandler(final ClientDefinitionManager clientDefinitionManager, final ClientFactoryService clientFactoryServices, final RuleManager ruleManager, final GraphUtils graphUtils, final GraphIndexBuilder<? 
extends MutableIndex<Node, Edge>> indexBuilder, final ShapeManager shapeManager, final TextPropertyProviderFactory textPropertyProviderFactory, final Event<CanvasElementAddedEvent> canvasElementAddedEvent, final Event<CanvasElementRemovedEvent> canvasElementRemovedEvent, final Event<CanvasElementUpdatedEvent> canvasElementUpdatedEvent, final Event<CanvasElementsClearEvent> canvasElementsClearEvent, final @CaseManagementEditor CanvasCommandFactory<AbstractCanvasHandler> canvasCommandFactory) { super(clientDefinitionManager, canvasCommandFactory, clientFactoryServices, ruleManager, graphUtils, indexBuilder, shapeManager, textPropertyProviderFactory, canvasElementAddedEvent, canvasElementRemovedEvent, canvasElementUpdatedEvent, canvasElementsClearEvent); } @Override public boolean isCanvasRoot(final Element parent) { return false; } @Override protected boolean isCanvasRoot(final String pUUID) { return false; } @Override public void register(final Shape shape, final Element<View<?>> candidate, final boolean fireEvents) { if (!isRenderable(shape)) { return; } super.register(shape, candidate, fireEvents); } @Override public void deregister(final Shape shape, final Element element, final boolean fireEvents) { if (!isRenderable(shape)) { return; } super.deregister(shape, element, fireEvents); } @Override public void addShape(final Shape shape) { if (!isRenderable(shape)) { return; } super.addShape(shape); } @Override public void addChild(final Element parent, final Element child) { final Shape parentShape = getCanvas().getShape(parent.getUUID()); final Shape childShape = getCanvas().getShape(child.getUUID()); if (!isRenderable(parentShape, childShape)) { return; } super.addChild(parent, child); } @SuppressWarnings("unchecked") public void addChild(final Element parent, final Element child, final int index) { final Shape parentShape = getCanvas().getShape(parent.getUUID()); final Shape childShape = getCanvas().getShape(child.getUUID()); if (!isRenderable(parentShape, 
childShape)) { return; } final CaseManagementCanvasPresenter caseManagementCanvasPresenter = (CaseManagementCanvasPresenter) getCanvas(); caseManagementCanvasPresenter.addChildShape(parentShape, childShape, index); } @Override public void removeShape(final Shape shape) { if (!isRenderable(shape)) { return; } super.removeShape(shape); } @Override public void removeChild(final Element parent, final Element child) { final Shape parentShape = getCanvas().getShape(parent.getUUID()); final Shape childShape = getCanvas().getShape(child.getUUID()); if (!isRenderable(parentShape, childShape)) { return; } super.removeChild(parent, child); } @Override public void applyElementMutation(final Shape shape, final Element candidate, final boolean applyPosition, final boolean applyProperties, final MutationContext mutationContext) { if (!isRenderable(shape)) { return; } super.applyElementMutation(shape, candidate, applyPosition, applyProperties, mutationContext); } @Override public void applyElementMutation(final Element candidate, final boolean applyPosition, final boolean applyProperties, final MutationContext mutationContext) { final Shape candidateShape = getCanvas().getShape(candidate.getUUID()); if (!isRenderable(candidateShape)) { return; } super.applyElementMutation(candidate, applyPosition, applyProperties, mutationContext); } boolean isRenderable(final Shape... shapes) { for (Shape shape : shapes) { if (shape == null || shape instanceof NullShape) { return false; } } return true; } }
// jTDS JDBC Driver for Microsoft SQL Server and Sybase
// Copyright (C) 2004 The jTDS Project
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
package net.sourceforge.jtds.jdbc;

import java.io.*;
import java.sql.*;

import net.sourceforge.jtds.util.Logger;
import net.sourceforge.jtds.util.ReaderInputStream;
import net.sourceforge.jtds.util.WriterOutputStream;

/**
 * An in-memory, disk or database representation of character data.
 * <p>
 * Implementation note:
 * <ol>
 * <li> Mostly Brian's original code but modified to include the
 *      ability to convert a stream into a String when required.
 * <li> SQLException messages loaded from properties file.
 * </ol>
 * <p>
 * Exactly one of the three backing stores is active at any time:
 * {@code _clob} (in-memory String), {@code _clobFile} (temp file on disk)
 * or {@code _jtdsReader} (streamed from the database).
 *
 * @author Brian Heineman
 * @author Mike Hutchinson
 * @version $Id: ClobImpl.java,v 1.30 2005/01/17 14:28:24 alin_sinpalean Exp $
 */
public class ClobImpl implements Clob {
    // Canonical empty value, also used as a sentinel before streaming data in.
    private static final String EMPTY_CLOB = "";

    // Owning connection; supplies LOB-buffer size and charset defaults.
    private final ConnectionJDBC2 _connection;
    // In-memory backing store (null when data lives on disk or in a reader).
    private String _clob;
    // On-disk backing store (temp file), used once data exceeds the LOB buffer.
    private File _clobFile;
    // Streamed backing store fed directly from the database.
    private JtdsReader _jtdsReader;

    /**
     * Constructs a new empty Clob instance.
     *
     * @param callerReference an object reference to the caller of this method;
     *        must be a <code>Connection</code>, <code>Statement</code> or
     *        <code>ResultSet</code>
     */
    ClobImpl(Object callerReference) {
        this(callerReference, EMPTY_CLOB);
    }

    /**
     * Constructs a new Clob instance wrapping an existing String.
     *
     * @param callerReference an object reference to the caller of this method;
     *        must be a <code>Connection</code>, <code>Statement</code> or
     *        <code>ResultSet</code>
     * @param clob the clob object to encapsulate
     */
    ClobImpl(Object callerReference, String clob) {
        if (clob == null) {
            throw new IllegalArgumentException("clob cannot be null.");
        }

        _clob = clob;
        _connection = Support.getConnection(callerReference);
    }

    /**
     * Constructs a new Clob instance by reading the value from the server
     * response stream.
     *
     * @param callerReference an object reference to the caller of this method;
     *        must be a <code>Connection</code>, <code>Statement</code> or
     *        <code>ResultSet</code>
     * @param in the clob object to encapsulate
     * @param ntext <code>true</code> if the data type is NTEXT (i.e. Unicode)
     * @param readTextMode <code>true</code> if READTEXT should be used to
     *        retrieve the value <code>false</code> if the value
     *        is directly available from <code>in</code>
     * @param charsetInfo the character set to be used for reading if the value
     *        is not Unicode encoded (i.e. TEXT)
     */
    ClobImpl(Object callerReference, ResponseStream in, boolean ntext,
             boolean readTextMode, CharsetInfo charsetInfo)
    throws IOException {
        if (in == null) {
            throw new IllegalArgumentException("in cannot be null.");
        }

        _connection = Support.getConnection(callerReference);

        // If the column doesn't have a specific character set, use the default
        if (charsetInfo == null) {
            charsetInfo = _connection.getCharsetInfo();
        }

        // Read the text pointer, timestamp and declared length off the wire.
        TextPtr tp = new TextPtr();
        in.read(tp.ptr);
        in.read(tp.ts);
        tp.len = in.readInt();

        // NOTE(review): the readTextMode parameter is unconditionally
        // overridden here, making the READTEXT branch below dead code.
        // Looks intentional (see FIXMEs inside the branch) — confirm before
        // removing either the parameter or the branch.
        readTextMode = false;
        if (readTextMode) {
            if (ntext) {
                char[] buf = new char[tp.len / 2];
                in.read(buf);
                tp.value = buf;
                // FIXME - Create a JtdsReader instead
                _clob = new String((char[]) tp.value);
            } else {
                byte[] bytes = new byte[tp.len];
                in.read(bytes);
                tp.value = bytes;
                // FIXME - Create a JtdsReader instead
                _clob = new String((byte[]) tp.value);
            }
        } else {
            try {
                if (tp.len < _connection.getLobBuffer()) {
                    // Small enough to hold entirely in memory.
                    if (ntext) {
                        _clob = in.readUnicodeString(tp.len / 2);
                    } else {
                        _clob = in.readNonUnicodeString(tp.len, charsetInfo);
                    }
                } else {
                    // Too large for memory: stream in 1024-byte slices through
                    // a writer, which will spill to a temp file as needed.
                    _clob = EMPTY_CLOB;

                    Writer writer = setCharacterStream(1);
                    long length = tp.len;

                    while (length > 0) {
                        int results = (int) Math.min(1024, length);
                        String data;

                        if (ntext) {
                            data = in.readUnicodeString(results / 2);
                        } else {
                            // FIXME This won't work for multi-byte charsets
                            data = in.readNonUnicodeString(results, charsetInfo);
                        }

                        length -= results;
                        writer.write(data);
                    }

                    writer.close();
                }

                if (in.getTdsVersion() < Driver.TDS70 && length() == 1
                        && getSubString(1, 1).equals(" ")) {
                    // In TDS 4/5 zero length strings are stored as a single space
                    // to distinguish them from nulls.
                    truncate(0);
                }
            } catch (SQLException e) {
                // Should never happen...
            }
        }

        if (ntext && (tp.len & 0x01) != 0) {
            // If text size is set to an odd number e.g. 1
            // Then only part of a char is available.
            in.read(); // Discard!
        }
/*
        if (statement != null && statement.getMaxFieldSize() == 1) {
            // Try to build a CLOB built over a Reader stream.
            ColData data = getColumn(columnIndex);
            ColInfo ci = columns[columnIndex - 1];

            if (data.getTextPtr() != null
                && ci.jdbcType == java.sql.Types.LONGVARCHAR) {
                return new ClobImpl(new JtdsReader((ConnectionJDBC2) statement.getConnection(),
                                                   ci,
                                                   data,
                                                   ((ConnectionJDBC2) statement.getConnection()).
                                                           getCharset()));
            }
        }
*/
    }

    /**
     * Returns a new ascii stream for the CLOB data.
     */
    public InputStream getAsciiStream() throws SQLException {
        return new ReaderInputStream(getCharacterStream(), "ASCII");
    }

    /**
     * Returns a new reader for the CLOB data, whichever backing store is
     * active. Synchronized because the backing store may be swapped by a
     * concurrent write/truncate.
     */
    public synchronized Reader getCharacterStream() throws SQLException {
        try {
            if (_clob != null) {
                return new StringReader(_clob);
            } else if (_clobFile != null) {
                // NOTE(review): FileReader uses the platform default charset;
                // the writer side (ClobFileWriter) uses String.getBytes() with
                // the default charset too, so the two at least agree.
                return new BufferedReader(new FileReader(_clobFile));
            }

            _jtdsReader.reset();

            return _jtdsReader;
        } catch (IOException e) {
            throw new SQLException(Messages.get("error.generic.ioerror",
                                                e.getMessage()), "HY000");
        }
    }

    /**
     * Returns up to {@code length} characters starting at the 1-based
     * position {@code pos}. Returns as much data as is available rather than
     * throwing when the requested range runs past the end.
     */
    public String getSubString(long pos, int length) throws SQLException {
        if (pos < 1) {
            throw new SQLException(Messages.get("error.blobclob.badpos"), "HY090");
        } else if (length < 0) {
            throw new SQLException(Messages.get("error.blobclob.badlen"), "HY090");
        } else if (pos - 1 + length > length()) {
            // Don't throw an exception, just return as much data as available
            length = (int) (length() - pos + 1);
        }

        if (length == 0) {
            return EMPTY_CLOB;
        }

        Reader reader = getCharacterStream();

        skip(reader, pos - 1);

        try {
            char[] buffer = new char[length];
            int bytesRead = 0, res;

            while ((res = reader.read(buffer, bytesRead, length - bytesRead)) != -1) {
                bytesRead += res;

                if (bytesRead == length) {
                    return new String(buffer);
                }
            }

            // Stream ended before the full requested range could be read.
            throw new SQLException(Messages.get("error.blobclob.readlen"), "HY000");
        } catch (IOException ioe) {
            throw new SQLException(
                    Messages.get("error.generic.ioread", "String", ioe.getMessage()),
                    "HY000");
        }
    }

    /**
     * Returns the length of the value in characters (or, for the file-backed
     * case, the file length).
     */
    public synchronized long length() throws SQLException {
        if (_clob != null) {
            return _clob.length();
        } else if (_clobFile != null) {
            return _clobFile.length();
        }

        return _jtdsReader.getLength();
    }

    /** Locates a String pattern; delegates to the Clob-based overload. */
    public long position(String searchStr, long start) throws SQLException {
        return position(new ClobImpl(_connection, searchStr), start);
    }

    /**
     * Locates a Clob pattern starting at {@code start}; returns -1 when not
     * found.
     * <p>
     * NOTE(review): within each outer iteration {@code value} is read exactly
     * once but compared against every character of the pattern, so only the
     * first character is effectively matched before {@code found} can stay
     * true; the start/return positions also appear 0-based while JDBC
     * specifies 1-based. Confirm against the java.sql.Clob contract before
     * relying on this method.
     */
    public long position(Clob searchStr, long start) throws SQLException {
        if (searchStr == null) {
            throw new SQLException(Messages.get("error.clob.searchnull"), "HY024");
        }

        try {
            Reader reader = getCharacterStream();
            long length = length() - searchStr.length();
            boolean reset = true;

            // TODO Implement a better pattern matching algorithm
            for (long i = start; i < length; i++) {
                boolean found = true;
                int value;

                if (reset) {
                    reader = getCharacterStream();
                    skip(reader, i);
                    reset = false;
                }

                value = reader.read();

                Reader searchReader = searchStr.getCharacterStream();
                int searchValue;

                while ((searchValue = searchReader.read()) != -1) {
                    if (value != searchValue) {
                        found = false;
                        break;
                    }

                    reset = true;
                }

                if (found) {
                    return i;
                }
            }
        } catch (IOException e) {
            throw new SQLException(
                    Messages.get("error.generic.ioread", "String", e.getMessage()),
                    "HY000");
        }

        return -1;
    }

    /** Returns an ASCII output stream positioned at {@code pos} (1-based). */
    public OutputStream setAsciiStream(final long pos) throws SQLException {
        return new WriterOutputStream(setCharacterStream(pos), "ASCII");
    }

    /**
     * Returns a writer positioned at {@code pos} (1-based). {@code pos} may be
     * at most one past the current end of the value.
     */
    public synchronized Writer setCharacterStream(final long pos) throws SQLException {
        long length = length();

        if (pos < 1) {
            throw new SQLException(Messages.get("error.blobclob.badpos"), "HY024");
        } else if (pos > length && pos != 1) {
            throw new SQLException(Messages.get("error.blobclob.badposlen"), "HY024");
        }

        return new ClobWriter(pos, length);
    }

    /** Writes the whole of {@code str} at position {@code pos}. */
    public int setString(long pos, String str) throws SQLException {
        if (str == null) {
            throw new SQLException(Messages.get("error.clob.strnull"), "HY090");
        }

        return setString(pos, str, 0, str.length());
    }

    /**
     * Writes {@code len} characters of {@code str}, starting at
     * {@code offset}, into the value at position {@code pos}; returns the
     * number of characters written.
     */
    public int setString(long pos, String str, int offset, int len)
    throws SQLException {
        Writer writer = setCharacterStream(pos);

        try {
            writer.write(str, offset, len);
            writer.close();
        } catch (IOException e) {
            throw new SQLException(
                    Messages.get("error.generic.iowrite", "String", e.getMessage()),
                    "HY000");
        }

        return len;
    }

    /**
     * Truncates the value to the length specified.
     *
     * @param len the length to truncate the value to
     */
    public synchronized void truncate(long len) throws SQLException {
        long currentLength = length();

        if (len < 0) {
            throw new SQLException(Messages.get("error.blobclob.badlen"), "HY090");
        } else if (len > currentLength) {
            throw new SQLException(Messages.get("error.blobclob.lentoolong"), "HY090");
        }

        if (len == currentLength) {
            return;
        } else if (len <= _connection.getLobBuffer()) {
            // Result fits in memory: snapshot the prefix as a String and drop
            // any file/stream backing store.
            _clob = getSubString(1, (int) len);

            if (_clobFile != null) {
                _clobFile.delete();
                _clobFile = null;
            }

            _jtdsReader = null;
        } else {
            // Result is still too large for memory: re-stream the prefix into
            // a fresh writer (which will spill to a new temp file).
            try {
                Reader reader = getCharacterStream();
                File tmpFile = _clobFile;

                _clob = "";
                _clobFile = null;
                _jtdsReader = null;

                Writer writer = setCharacterStream(1);
                char[] buffer = new char[1024];
                int result;

                while ((result = reader.read(buffer, 0, (int) Math.min(buffer.length, len))) > 0) {
                    len -= result;
                    writer.write(buffer, 0, result);
                }

                writer.close();

                // If the data came from a file; delete the original file to
                // free disk space
                if (tmpFile != null) {
                    tmpFile.delete();
                }
            } catch (IOException e) {
                throw new SQLException(Messages.get("error.generic.iowrite",
                                                    "String", e.getMessage()),
                                       "HY000");
            }
        }
    }

    /**
     * Skips exactly {@code skip} characters on {@code reader}, translating a
     * short skip or I/O failure into a SQLException.
     */
    private void skip(Reader reader, long skip) throws SQLException {
        try {
            long skipped = reader.skip(skip);

            if (skipped != skip) {
                throw new SQLException(Messages.get("error.blobclob.badposlen"), "HY090");
            }
        } catch (IOException e) {
            throw new SQLException(Messages.get("error.generic.ioerror",
                                                e.getMessage()), "HY000");
        }
    }

    // Best-effort cleanup of the temp file when the Clob is garbage collected.
    protected void finalize() {
        if (_clobFile != null) {
            _clobFile.delete();
        }
    }

    /**
     * Class to manage any Clob write. Decides, based on the target length,
     * whether writes go to the in-memory String or to a temp file, and swaps
     * the backing store mid-stream when the in-memory limit is exceeded.
     */
    class ClobWriter extends Writer {
        // Delegate sink for the currently active backing store.
        Writer writer;
        // Current 0-based write position within the value.
        long curPos;
        // Set when a temp file could not be created; keeps data in memory.
        boolean securityFailure = false;

        ClobWriter(long pos, long length) throws SQLException {
            curPos = pos - 1;

            try {
                if (length > _connection.getLobBuffer()) {
                    // Value is too large for memory: materialize it on disk.
                    if (_clobFile == null) {
                        writeToDisk(getCharacterStream());
                    }
                } else if (_jtdsReader != null) {
                    // Drain the database stream into an in-memory String.
                    StringWriter sw = new StringWriter((int) length);
                    char[] buffer = new char[1024];
                    int result;

                    while ((result = _jtdsReader.read(buffer)) != -1) {
                        sw.write(buffer, 0, result);
                    }

                    _clob = sw.toString();
                    _jtdsReader = null;
                }

                updateWriter();
            } catch (IOException e) {
                throw new SQLException(Messages.get("error.generic.ioerror",
                                                    e.getMessage()), "HY000");
            }
        }

        public void write(int c) throws IOException {
            synchronized (ClobImpl.this) {
                checkSize(1);
                writer.write(c);
                curPos++;
            }
        }

        public void write(char[] cbuf, int off, int len) throws IOException {
            synchronized (ClobImpl.this) {
                checkSize(len);
                writer.write(cbuf, off, len);
                curPos += len;
            }
        }

        /**
         * Checks the size of the in-memory buffer; if a write will
         * cause the size to exceed <code>MAXIMUM_SIZE</code> then
         * the data will be removed from memory and written to disk.
         *
         * @param length the length of data to be written
         */
        private void checkSize(long length) throws IOException {
            // Return if the data has already exceeded the maximum size
            if (curPos > _connection.getLobBuffer()) {
                return;
            }

            // Return if a file is already being used to store the data
            if (_clobFile != null) {
                return;
            }

            // Return if there was a security failure attempting to
            // create a buffer file
            if (securityFailure) {
                return;
            }

            // Return if the length will not exceed the maximum in-memory
            // value
            if (curPos + length <= _connection.getLobBuffer()) {
                return;
            }

            if (_clob != null) {
                writeToDisk(new StringReader(_clob));
                updateWriter();
            }
        }

        /**
         * Copies the current value from {@code reader} into a new temp file;
         * falls back to an in-memory StringWriter when file creation is
         * denied by the security manager.
         */
        void writeToDisk(Reader reader) throws IOException {
            Writer wtr;

            try {
                _clobFile = File.createTempFile("jtds", ".tmp");
                _clobFile.deleteOnExit();

                wtr = new BufferedWriter(new FileWriter(_clobFile));
            } catch (SecurityException e) {
                // Unable to write to disk
                securityFailure = true;

                if (_clobFile != null) {
                    try {
                        _clobFile.delete();
                    } catch (SecurityException ex) {
                        // Ignore exception
                    }
                    _clobFile = null;
                }

                wtr = new StringWriter();

                if (Logger.isActive()) {
                    Logger.println("Clob: Unable to buffer data to disk: "
                                   + e.getMessage());
                }
            }

            try {
                char[] buffer = new char[1024];
                int result;

                while ((result = reader.read(buffer)) != -1) {
                    wtr.write(buffer, 0, result);
                }
            } finally {
                wtr.flush();

                if (wtr instanceof StringWriter) {
                    // Fallback path: data stays in memory.
                    _clob = wtr.toString();
                } else {
                    // Data now lives in _clobFile; release the String copy.
                    _clob = null;
                }

                wtr.close();
            }
        }

        /**
         * Updates the <code>outputStream</code> member by creating the
         * approperiate type of output stream based upon the current
         * storage mechanism.
         *
         * @throws IOException if any failure occure while creating the
         *         output stream
         */
        void updateWriter() throws IOException {
            if (_clob != null) {
                final long startPos = curPos;

                // Anonymous writer that splices written data into the
                // in-memory String (note: shadows the outer curPos on purpose).
                writer = new Writer() {
                    int curPos = (int) startPos;
                    boolean closed = false;
                    char[] singleChar = new char[1];

                    private void checkOpen() throws IOException {
                        if (closed) {
                            throw new IOException("stream closed");
                        } else if (_clob == null) {
                            throw new IOException(
                                    Messages.get("error.generic.iowrite", "byte",
                                                 "_clob = NULL"));
                        }
                    }

                    public void write(int c) throws IOException {
                        checkOpen();
                        singleChar[0] = (char) c;
                        write(singleChar, 0, 1);
                    }

                    public void write(char[] cbuf, int off, int len) throws IOException {
                        checkOpen();

                        if (cbuf == null) {
                            throw new NullPointerException();
                        } else if (off < 0 || len < 0 || off > cbuf.length
                                   || off + len > cbuf.length || off + len < 0) {
                            throw new IndexOutOfBoundsException();
                        } else if (len == 0) {
                            return;
                        }

                        // FIXME - Optimize writes; reduce memory allocation by creating fewer objects.
                        if (curPos + 1 > _clob.length()) {
                            // Appending past the current end.
                            _clob += new String(cbuf, off, len);
                        } else {
                            // Overwriting in the middle; re-append any tail.
                            String tmpClob = _clob;

                            _clob = tmpClob.substring(0, curPos)
                                    + new String(cbuf, off, len);

                            if (_clob.length() < tmpClob.length()) {
                                _clob += tmpClob.substring(curPos + len);
                            }
                        }

                        curPos += len;
                    }

                    public void flush() {
                    }

                    public void close() {
                        closed = true;
                    }
                };
            } else {
                writer = new ClobFileWriter(curPos);
            }
        }

        public void flush() throws IOException {
            writer.flush();
        }

        public void close() throws IOException {
            writer.close();
        }
    };

    /**
     * Class to manage Clob file writes.
     */
    class ClobFileWriter extends Writer {
        RandomAccessFile raf;
        char[] singleChar = new char[1];

        ClobFileWriter(long curPos) throws IOException {
            raf = new RandomAccessFile(_clobFile, "rw");
            raf.seek(curPos);
        }

        public void write(int c) throws IOException {
            singleChar[0] = (char) c;
            write(singleChar, 0, 1);
        }

        public void write(char cbuf[], int off, int len) throws IOException {
            if (raf == null) {
                throw new IOException("stream closed");
            }

            if (cbuf == null) {
                throw new NullPointerException();
            } else if (off < 0 || len < 0 || off > cbuf.length
                       || off + len > cbuf.length) {
                throw new ArrayIndexOutOfBoundsException();
            } else if (len == 0) {
                return;
            }

            // NOTE(review): String.getBytes() uses the platform default
            // charset and raf.seek() above uses a character position as a byte
            // offset — both only hold for single-byte charsets; confirm.
            byte[] data = new String(cbuf, off, len).getBytes();

            raf.write(data, 0, data.length);
        }

        public void flush() throws IOException {
        }

        public void close() throws IOException {
            if (raf != null) {
                raf.close();
                raf = null;
            }
        }
    }

    // JDBC 4 addition; not implemented — resources are released via finalize().
    @Override
    public void free() throws SQLException {
        // TODO Auto-generated method stub
    }

    // JDBC 4 addition; NOTE(review): returning null violates the Clob
    // contract — callers expecting a bounded reader will NPE.
    @Override
    public Reader getCharacterStream(long arg0, long arg1) throws SQLException {
        // TODO Auto-generated method stub
        return null;
    };
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.squareup.okhttp.internal.http;

import com.squareup.okhttp.Address;
import com.squareup.okhttp.CertificatePinner;
import com.squareup.okhttp.Connection;
import com.squareup.okhttp.ConnectionPool;
import com.squareup.okhttp.Headers;
import com.squareup.okhttp.Interceptor;
import com.squareup.okhttp.MediaType;
import com.squareup.okhttp.OkHttpClient;
import com.squareup.okhttp.Protocol;
import com.squareup.okhttp.Request;
import com.squareup.okhttp.Response;
import com.squareup.okhttp.ResponseBody;
import com.squareup.okhttp.Route;
import com.squareup.okhttp.internal.Internal;
import com.squareup.okhttp.internal.InternalCache;
import com.squareup.okhttp.internal.Util;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.CookieHandler;
import java.net.ProtocolException;
import java.net.Proxy;
import java.net.URL;
import java.net.UnknownHostException;
import java.security.cert.CertificateException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLHandshakeException;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSocketFactory;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.GzipSource;
import okio.Okio;
import okio.Sink;
import okio.Source;
import okio.Timeout;

import static com.squareup.okhttp.internal.Util.closeQuietly;
import static com.squareup.okhttp.internal.Util.getDefaultPort;
import static com.squareup.okhttp.internal.Util.getEffectivePort;
import static com.squareup.okhttp.internal.http.StatusLine.HTTP_CONTINUE;
import static com.squareup.okhttp.internal.http.StatusLine.HTTP_PERM_REDIRECT;
import static com.squareup.okhttp.internal.http.StatusLine.HTTP_TEMP_REDIRECT;
import static java.net.HttpURLConnection.HTTP_MOVED_PERM;
import static java.net.HttpURLConnection.HTTP_MOVED_TEMP;
import static java.net.HttpURLConnection.HTTP_MULT_CHOICE;
import static java.net.HttpURLConnection.HTTP_NOT_MODIFIED;
import static java.net.HttpURLConnection.HTTP_NO_CONTENT;
import static java.net.HttpURLConnection.HTTP_PROXY_AUTH;
import static java.net.HttpURLConnection.HTTP_SEE_OTHER;
import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED;
import static java.util.concurrent.TimeUnit.MILLISECONDS;

/**
 * Handles a single HTTP request/response pair. Each HTTP engine follows this
 * lifecycle:
 * <ol>
 *     <li>It is created.
 *     <li>The HTTP request message is sent with sendRequest(). Once the request
 *         is sent it is an error to modify the request headers. After
 *         sendRequest() has been called the request body can be written to if
 *         it exists.
 *     <li>The HTTP response message is read with readResponse(). After the
 *         response has been read the response headers and body can be read.
 *         All responses have a response body input stream, though in some
 *         instances this stream is empty.
 * </ol>
 *
 * <p>The request and response may be served by the HTTP response cache, by the
 * network, or by both in the event of a conditional GET.
 */
public final class HttpEngine {
  /**
   * How many redirects and auth challenges should we attempt? Chrome follows 21 redirects; Firefox,
   * curl, and wget follow 20; Safari follows 16; and HTTP/1.0 recommends 5.
   */
  public static final int MAX_FOLLOW_UPS = 20;

  // Placeholder body used when responding 504 without touching the network.
  private static final ResponseBody EMPTY_BODY = new ResponseBody() {
    @Override public MediaType contentType() {
      return null;
    }
    @Override public long contentLength() {
      return 0;
    }
    @Override public BufferedSource source() {
      return new Buffer();
    }
  };

  final OkHttpClient client;

  private Connection connection;
  private Address address;
  private RouteSelector routeSelector;
  private Route route;
  private final Response priorResponse;

  private Transport transport;

  /** The time when the request headers were written, or -1 if they haven't been written yet. */
  long sentRequestMillis = -1;

  /**
   * True if this client added an "Accept-Encoding: gzip" header field and is
   * therefore responsible for also decompressing the transfer stream.
   */
  private boolean transparentGzip;

  /**
   * True if the request body must be completely buffered before transmission;
   * false if it can be streamed. Buffering has two advantages: we don't need
   * the content-length in advance and we can retransmit if necessary. The
   * upside of streaming is that we can save memory.
   */
  public final boolean bufferRequestBody;

  /**
   * The original application-provided request. Never modified by OkHttp. When
   * follow-up requests are necessary, they are derived from this request.
   */
  private final Request userRequest;

  /**
   * The request to send on the network, or null for no network request. This is
   * derived from the user request, and customized to support OkHttp features
   * like compression and caching.
   */
  private Request networkRequest;

  /**
   * The cached response, or null if the cache doesn't exist or cannot be used
   * for this request. Conditional caching means this may be non-null even when
   * the network request is non-null. Never modified by OkHttp.
   */
  private Response cacheResponse;

  /**
   * The user-visible response. This is derived from either the network
   * response, cache response, or both. It is customized to support OkHttp
   * features like compression and caching.
   */
  private Response userResponse;

  private Sink requestBodyOut;
  private BufferedSink bufferedRequestBody;
  private final boolean callerWritesRequestBody;
  private final boolean forWebSocket;

  /** The cache request currently being populated from a network response. */
  private CacheRequest storeRequest;
  private CacheStrategy cacheStrategy;

  /**
   * @param request the HTTP request without a body. The body must be written via the engine's
   *     request body stream.
   * @param callerWritesRequestBody true for the {@code HttpURLConnection}-style interaction
   *     model where control flow is returned to the calling application to write the request body
   *     before the response body is readable.
   * @param connection the connection used for an intermediate response immediately prior to this
   *     request/response pair, such as a same-host redirect. This engine assumes ownership of the
   *     connection and must release it when it is unneeded.
   * @param routeSelector the route selector used for a failed attempt immediately preceding this
   */
  public HttpEngine(OkHttpClient client, Request request, boolean bufferRequestBody,
      boolean callerWritesRequestBody, boolean forWebSocket, Connection connection,
      RouteSelector routeSelector, RetryableSink requestBodyOut, Response priorResponse) {
    this.client = client;
    this.userRequest = request;
    this.bufferRequestBody = bufferRequestBody;
    this.callerWritesRequestBody = callerWritesRequestBody;
    this.forWebSocket = forWebSocket;
    this.connection = connection;
    this.routeSelector = routeSelector;
    this.requestBodyOut = requestBodyOut;
    this.priorResponse = priorResponse;

    if (connection != null) {
      // Take ownership of the inherited connection and remember its route.
      Internal.instance.setOwner(connection, this);
      this.route = connection.getRoute();
    } else {
      this.route = null;
    }
  }

  /**
   * Figures out what the response source will be, and opens a socket to that
   * source if necessary. Prepares the request headers and gets ready to start
   * writing the request body if it exists.
   */
  public void sendRequest() throws IOException {
    if (cacheStrategy != null) return; // Already sent.
    if (transport != null) throw new IllegalStateException();

    Request request = networkRequest(userRequest);

    InternalCache responseCache = Internal.instance.internalCache(client);
    Response cacheCandidate = responseCache != null
        ? responseCache.get(request)
        : null;

    long now = System.currentTimeMillis();
    cacheStrategy = new CacheStrategy.Factory(now, request, cacheCandidate).get();
    networkRequest = cacheStrategy.networkRequest;
    cacheResponse = cacheStrategy.cacheResponse;

    if (responseCache != null) {
      responseCache.trackResponse(cacheStrategy);
    }

    if (cacheCandidate != null && cacheResponse == null) {
      closeQuietly(cacheCandidate.body()); // The cache candidate wasn't applicable. Close it.
    }

    if (networkRequest != null) {
      // Open a connection unless we inherited one from a redirect.
      if (connection == null) {
        connect();
      }

      transport = Internal.instance.newTransport(connection, this);

      // If the caller's control flow writes the request body, we need to create that stream
      // immediately. And that means we need to immediately write the request headers, so we can
      // start streaming the request body. (We may already have a request body if we're retrying a
      // failed POST.)
      if (callerWritesRequestBody && permitsRequestBody() && requestBodyOut == null) {
        long contentLength = OkHeaders.contentLength(request);
        if (bufferRequestBody) {
          if (contentLength > Integer.MAX_VALUE) {
            throw new IllegalStateException("Use setFixedLengthStreamingMode() or "
                + "setChunkedStreamingMode() for requests larger than 2 GiB.");
          }

          if (contentLength != -1) {
            // Buffer a request body of a known length.
            transport.writeRequestHeaders(networkRequest);
            requestBodyOut = new RetryableSink((int) contentLength);
          } else {
            // Buffer a request body of an unknown length. Don't write request
            // headers until the entire body is ready; otherwise we can't set the
            // Content-Length header correctly.
            requestBodyOut = new RetryableSink();
          }
        } else {
          transport.writeRequestHeaders(networkRequest);
          requestBodyOut = transport.createRequestBody(networkRequest, contentLength);
        }
      }
    } else {
      // We aren't using the network. Recycle a connection we may have inherited from a redirect.
      if (connection != null) {
        Internal.instance.recycle(client.getConnectionPool(), connection);
        connection = null;
      }

      if (cacheResponse != null) {
        // We have a valid cached response. Promote it to the user response immediately.
        this.userResponse = cacheResponse.newBuilder()
            .request(userRequest)
            .priorResponse(stripBody(priorResponse))
            .cacheResponse(stripBody(cacheResponse))
            .build();
      } else {
        // We're forbidden from using the network, and the cache is insufficient.
        this.userResponse = new Response.Builder()
            .request(userRequest)
            .priorResponse(stripBody(priorResponse))
            .protocol(Protocol.HTTP_1_1)
            .code(504)
            .message("Unsatisfiable Request (only-if-cached)")
            .body(EMPTY_BODY)
            .build();
      }

      userResponse = unzip(userResponse);
    }
  }

  /** Returns a copy of {@code response} with its body removed, or null input unchanged. */
  private static Response stripBody(Response response) {
    return response != null && response.body() != null
        ? response.newBuilder().body(null).build()
        : response;
  }

  /** Connect to the origin server either directly or via a proxy. */
  private void connect() throws IOException {
    if (connection != null) throw new IllegalStateException();

    if (routeSelector == null) {
      address = createAddress(client, networkRequest);
      routeSelector = RouteSelector.get(address, networkRequest, client);
    }

    connection = nextConnection();
    route = connection.getRoute();
  }

  /**
   * Returns the next connection to attempt.
   *
   * @throws java.util.NoSuchElementException if there are no more routes to attempt.
   */
  private Connection nextConnection() throws IOException {
    Connection connection = createNextConnection();
    Internal.instance.connectAndSetOwner(client, connection, this, networkRequest);
    return connection;
  }

  /** Returns a pooled connection when possible, otherwise a fresh one on the next route. */
  private Connection createNextConnection() throws IOException {
    ConnectionPool pool = client.getConnectionPool();

    // Always prefer pooled connections over new connections.
    for (Connection pooled; (pooled = pool.get(address)) != null; ) {
      if (networkRequest.method().equals("GET") || Internal.instance.isReadable(pooled)) {
        return pooled;
      }
      pooled.getSocket().close();
    }

    Route route = routeSelector.next();
    return new Connection(pool, route);
  }

  /**
   * Called immediately before the transport transmits HTTP request headers.
   * This is used to observe the sent time should the request be cached.
   */
  public void writingRequestHeaders() {
    if (sentRequestMillis != -1) throw new IllegalStateException();
    sentRequestMillis = System.currentTimeMillis();
  }

  // True when the user's HTTP method allows a request body.
  boolean permitsRequestBody() {
    return HttpMethod.permitsRequestBody(userRequest.method());
  }

  /** Returns the request body or null if this request doesn't have a body. */
  public Sink getRequestBody() {
    if (cacheStrategy == null) throw new IllegalStateException();
    return requestBodyOut;
  }

  /** Lazily wraps the request body sink in a buffered sink; null if there is no body. */
  public BufferedSink getBufferedRequestBody() {
    BufferedSink result = bufferedRequestBody;
    if (result != null) return result;
    Sink requestBody = getRequestBody();
    return requestBody != null
        ? (bufferedRequestBody = Okio.buffer(requestBody))
        : null;
  }

  public boolean hasResponse() {
    return userResponse != null;
  }

  public Request getRequest() {
    return userRequest;
  }

  /** Returns the engine's response. */
  // TODO: the returned body will always be null.
  public Response getResponse() {
    if (userResponse == null) throw new IllegalStateException();
    return userResponse;
  }

  public Connection getConnection() {
    return connection;
  }

  /**
   * Report and attempt to recover from {@code e}. Returns a new HTTP engine
   * that should be used for the retry if {@code e} is recoverable, or null if
   * the failure is permanent. Requests with a body can only be recovered if the
   * body is buffered.
   */
  public HttpEngine recover(IOException e, Sink requestBodyOut) {
    if (routeSelector != null && connection != null) {
      connectFailed(routeSelector, e);
    }

    boolean canRetryRequestBody = requestBodyOut == null || requestBodyOut instanceof RetryableSink;
    if (routeSelector == null && connection == null // No connection.
        || routeSelector != null && !routeSelector.hasNext() // No more routes to attempt.
        || !isRecoverable(e)
        || !canRetryRequestBody) {
      return null;
    }

    Connection connection = close();

    // For failure recovery, use the same route selector with a new connection.
    return new HttpEngine(client, userRequest, bufferRequestBody, callerWritesRequestBody,
        forWebSocket, connection, routeSelector, (RetryableSink) requestBodyOut, priorResponse);
  }

  /** Reports the failed route to the route selector so it is deprioritized. */
  private void connectFailed(RouteSelector routeSelector, IOException e) {
    // If this is a recycled connection, don't count its failure against the route.
    if (Internal.instance.recycleCount(connection) > 0) return;
    Route failedRoute = connection.getRoute();
    routeSelector.connectFailed(failedRoute, e);
  }

  public HttpEngine recover(IOException e) {
    return recover(e, requestBodyOut);
  }

  /** Returns true when a retry on a different route might succeed for {@code e}. */
  private boolean isRecoverable(IOException e) {
    // If the application has opted-out of recovery, don't recover.
    if (!client.getRetryOnConnectionFailure()) {
      return false;
    }

    // If the problem was a CertificateException from the X509TrustManager,
    // do not retry, we didn't have an abrupt server-initiated exception.
    if (e instanceof SSLPeerUnverifiedException
        || (e instanceof SSLHandshakeException && e.getCause() instanceof CertificateException)) {
      return false;
    }

    // If there was a protocol problem, don't recover.
    if (e instanceof ProtocolException) {
      return false;
    }

    // If there was an interruption or timeout, don't recover.
    if (e instanceof InterruptedIOException) {
      return false;
    }

    return true;
  }

  /**
   * Returns the route used to retrieve the response. Null if we haven't
   * connected yet, or if no connection was necessary.
   */
  public Route getRoute() {
    return route;
  }

  /** Offers the user response to the cache if it is cacheable; invalidates stale entries. */
  private void maybeCache() throws IOException {
    InternalCache responseCache = Internal.instance.internalCache(client);
    if (responseCache == null) return;

    // Should we cache this response for this request?
    if (!CacheStrategy.isCacheable(userResponse, networkRequest)) {
      if (HttpMethod.invalidatesCache(networkRequest.method())) {
        try {
          responseCache.remove(networkRequest);
        } catch (IOException ignored) {
          // The cache cannot be written.
        }
      }
      return;
    }

    // Offer this request to the cache.
    storeRequest = responseCache.put(stripBody(userResponse));
  }

  /**
   * Configure the socket connection to be either pooled or closed when it is
   * either exhausted or closed. If it is unneeded when this is called, it will
   * be released immediately.
   */
  public void releaseConnection() throws IOException {
    if (transport != null && connection != null) {
      transport.releaseConnectionOnIdle();
    }
    connection = null;
  }

  /**
   * Immediately closes the socket connection if it's currently held by this
   * engine. Use this to interrupt an in-flight request from any thread. It's
   * the caller's responsibility to close the request body and response body
   * streams; otherwise resources may be leaked.
   */
  public void disconnect() {
    if (transport != null) {
      try {
        transport.disconnect(this);
      } catch (IOException ignored) {
      }
    }
  }

  /**
   * Release any resources held by this engine. If a connection is still held by
   * this engine, it is returned.
   */
  public Connection close() {
    if (bufferedRequestBody != null) {
      // This also closes the wrapped requestBodyOut.
      closeQuietly(bufferedRequestBody);
    } else if (requestBodyOut != null) {
      closeQuietly(requestBodyOut);
    }

    // If this engine never achieved a response body, its connection cannot be reused.
    if (userResponse == null) {
      if (connection != null) closeQuietly(connection.getSocket()); // TODO: does this break SPDY?
      connection = null;
      return null;
    }

    // Close the response body. This will recycle the connection if it is eligible.
    closeQuietly(userResponse.body());

    // Close the connection if it cannot be reused.
    if (transport != null && connection != null && !transport.canReuseConnection()) {
      closeQuietly(connection.getSocket());
      connection = null;
      return null;
    }

    // Prevent this engine from disconnecting a connection it no longer owns.
    if (connection != null && !Internal.instance.clearOwner(connection)) {
      connection = null;
    }

    Connection result = connection;
    connection = null;
    return result;
  }

  /**
   * Returns a new response that does gzip decompression on {@code response}, if transparent gzip
   * was both offered by OkHttp and used by the origin server.
   *
   * <p>In addition to decompression, this will also strip the corresponding headers. We strip the
   * Content-Encoding header to prevent the application from attempting to double decompress. We
   * strip the Content-Length header because it is the length of the compressed content, but the
   * application is only interested in the length of the uncompressed content.
   *
   * <p>This method should only be used for non-empty response bodies. Response codes like "304 Not
   * Modified" can include "Content-Encoding: gzip" without a response body and we will crash if we
   * attempt to decompress the zero-byte source.
   */
  private Response unzip(final Response response) throws IOException {
    // NOTE(review): the Content-Encoding check reads the userResponse FIELD
    // rather than the response parameter — confirm this is intentional for
    // all call sites (sendRequest passes userResponse, so they coincide there).
    if (!transparentGzip || !"gzip".equalsIgnoreCase(userResponse.header("Content-Encoding"))) {
      return response;
    }

    if (response.body() == null) {
      return response;
    }

    GzipSource responseBody = new GzipSource(response.body().source());
    Headers strippedHeaders = response.headers().newBuilder()
        .removeAll("Content-Encoding")
        .removeAll("Content-Length")
        .build();
    return response.newBuilder()
        .headers(strippedHeaders)
        .body(new RealResponseBody(strippedHeaders, Okio.buffer(responseBody)))
        .build();
  }

  /**
   * Returns true if the response must have a (possibly 0-length) body.
   * See RFC 2616 section 4.3.
   */
  public static boolean hasBody(Response response) {
    // HEAD requests never yield a body regardless of the response headers.
    if (response.request().method().equals("HEAD")) {
      return false;
    }

    int responseCode = response.code();
    if ((responseCode < HTTP_CONTINUE || responseCode >= 200)
        && responseCode != HTTP_NO_CONTENT
        && responseCode != HTTP_NOT_MODIFIED) {
      return true;
    }

    // If the Content-Length or Transfer-Encoding headers disagree with the
    // response code, the response is malformed. For best compatibility, we
    // honor the headers.
    if (OkHeaders.contentLength(response) != -1
        || "chunked".equalsIgnoreCase(response.header("Transfer-Encoding"))) {
      return true;
    }

    return false;
  }

  /**
   * Populates request with defaults and cookies.
   *
   * <p>This client doesn't specify a default {@code Accept} header because it
   * doesn't know what content types the application is interested in.
*/ private Request networkRequest(Request request) throws IOException { Request.Builder result = request.newBuilder(); if (request.header("Host") == null) { result.header("Host", hostHeader(request.url())); } if ((connection == null || connection.getProtocol() != Protocol.HTTP_1_0) && request.header("Connection") == null) { result.header("Connection", "Keep-Alive"); } if (request.header("Accept-Encoding") == null) { transparentGzip = true; result.header("Accept-Encoding", "gzip"); } CookieHandler cookieHandler = client.getCookieHandler(); if (cookieHandler != null) { // Capture the request headers added so far so that they can be offered to the CookieHandler. // This is mostly to stay close to the RI; it is unlikely any of the headers above would // affect cookie choice besides "Host". Map<String, List<String>> headers = OkHeaders.toMultimap(result.build().headers(), null); Map<String, List<String>> cookies = cookieHandler.get(request.uri(), headers); // Add any new cookies to the request. OkHeaders.addCookies(result, cookies); } if (request.header("User-Agent") == null) { result.header("User-Agent", System.getProperty( "http.agent" )); } return result.build(); } public static String hostHeader(URL url) { return getEffectivePort(url) != getDefaultPort(url.getProtocol()) ? url.getHost() + ":" + url.getPort() : url.getHost(); } /** * Flushes the remaining request header and body, parses the HTTP response * headers and starts reading the HTTP response body if it exists. */ public void readResponse() throws IOException { if (userResponse != null) { return; // Already ready. } if (networkRequest == null && cacheResponse == null) { throw new IllegalStateException("call sendRequest() first!"); } if (networkRequest == null) { return; // No network response to read. 
} Response networkResponse; if (forWebSocket) { transport.writeRequestHeaders(networkRequest); networkResponse = readNetworkResponse(); } else if (!callerWritesRequestBody) { networkResponse = new NetworkInterceptorChain(0, networkRequest).proceed(networkRequest); } else { // Emit the request body's buffer so that everything is in requestBodyOut. if (bufferedRequestBody != null && bufferedRequestBody.buffer().size() > 0) { bufferedRequestBody.emit(); } // Emit the request headers if we haven't yet. We might have just learned the Content-Length. if (sentRequestMillis == -1) { if (OkHeaders.contentLength(networkRequest) == -1 && requestBodyOut instanceof RetryableSink) { long contentLength = ((RetryableSink) requestBodyOut).contentLength(); networkRequest = networkRequest.newBuilder() .header("Content-Length", Long.toString(contentLength)) .build(); } transport.writeRequestHeaders(networkRequest); } // Write the request body to the socket. if (requestBodyOut != null) { if (bufferedRequestBody != null) { // This also closes the wrapped requestBodyOut. bufferedRequestBody.close(); } else { requestBodyOut.close(); } if (requestBodyOut instanceof RetryableSink) { transport.writeRequestBody((RetryableSink) requestBodyOut); } } networkResponse = readNetworkResponse(); } receiveHeaders(networkResponse.headers()); // If we have a cache response too, then we're doing a conditional get. if (cacheResponse != null) { if (validate(cacheResponse, networkResponse)) { userResponse = cacheResponse.newBuilder() .request(userRequest) .priorResponse(stripBody(priorResponse)) .headers(combine(cacheResponse.headers(), networkResponse.headers())) .cacheResponse(stripBody(cacheResponse)) .networkResponse(stripBody(networkResponse)) .build(); networkResponse.body().close(); releaseConnection(); // Update the cache after combining headers but before stripping the // Content-Encoding header (as performed by initContentStream()). 
InternalCache responseCache = Internal.instance.internalCache(client); responseCache.trackConditionalCacheHit(); responseCache.update(cacheResponse, stripBody(userResponse)); userResponse = unzip(userResponse); return; } else { closeQuietly(cacheResponse.body()); } } userResponse = networkResponse.newBuilder() .request(userRequest) .priorResponse(stripBody(priorResponse)) .cacheResponse(stripBody(cacheResponse)) .networkResponse(stripBody(networkResponse)) .build(); if (hasBody(userResponse)) { maybeCache(); userResponse = unzip(cacheWritingResponse(storeRequest, userResponse)); } } class NetworkInterceptorChain implements Interceptor.Chain { private final int index; private final Request request; private int calls; NetworkInterceptorChain(int index, Request request) { this.index = index; this.request = request; } @Override public Connection connection() { return connection; } @Override public Request request() { return request; } @Override public Response proceed(Request request) throws IOException { calls++; if (index > 0) { Interceptor caller = client.networkInterceptors().get(index - 1); Address address = connection().getRoute().getAddress(); // Confirm that the interceptor uses the connection we've already prepared. if (!request.url().getHost().equals(address.getUriHost()) || getEffectivePort(request.url()) != address.getUriPort()) { throw new IllegalStateException("network interceptor " + caller + " must retain the same host and port"); } // Confirm that this is the interceptor's first call to chain.proceed(). if (calls > 1) { throw new IllegalStateException("network interceptor " + caller + " must call proceed() exactly once"); } } if (index < client.networkInterceptors().size()) { // There's another interceptor in the chain. Call that. 
NetworkInterceptorChain chain = new NetworkInterceptorChain(index + 1, request); Interceptor interceptor = client.networkInterceptors().get(index); Response interceptedResponse = interceptor.intercept(chain); // Confirm that the interceptor made the required call to chain.proceed(). if (chain.calls != 1) { throw new IllegalStateException("network interceptor " + interceptor + " must call proceed() exactly once"); } return interceptedResponse; } transport.writeRequestHeaders(request); if (permitsRequestBody() && request.body() != null) { Sink requestBodyOut = transport.createRequestBody(request, request.body().contentLength()); BufferedSink bufferedRequestBody = Okio.buffer(requestBodyOut); request.body().writeTo(bufferedRequestBody); bufferedRequestBody.close(); } return readNetworkResponse(); } } private Response readNetworkResponse() throws IOException { transport.finishRequest(); Response networkResponse = transport.readResponseHeaders() .request(networkRequest) .handshake(connection.getHandshake()) .header(OkHeaders.SENT_MILLIS, Long.toString(sentRequestMillis)) .header(OkHeaders.RECEIVED_MILLIS, Long.toString(System.currentTimeMillis())) .build(); if (!forWebSocket) { networkResponse = networkResponse.newBuilder() .body(transport.openResponseBody(networkResponse)) .build(); } Internal.instance.setProtocol(connection, networkResponse.protocol()); return networkResponse; } /** * Returns a new source that writes bytes to {@code cacheRequest} as they are read by the source * consumer. This is careful to discard bytes left over when the stream is closed; otherwise we * may never exhaust the source stream and therefore not complete the cached response. */ private Response cacheWritingResponse(final CacheRequest cacheRequest, Response response) throws IOException { // Some apps return a null body; for compatibility we treat that like a null cache request. 
if (cacheRequest == null) return response; Sink cacheBodyUnbuffered = cacheRequest.body(); if (cacheBodyUnbuffered == null) return response; final BufferedSource source = response.body().source(); final BufferedSink cacheBody = Okio.buffer(cacheBodyUnbuffered); Source cacheWritingSource = new Source() { boolean cacheRequestClosed; @Override public long read(Buffer sink, long byteCount) throws IOException { long bytesRead; try { bytesRead = source.read(sink, byteCount); } catch (IOException e) { if (!cacheRequestClosed) { cacheRequestClosed = true; cacheRequest.abort(); // Failed to write a complete cache response. } throw e; } if (bytesRead == -1) { if (!cacheRequestClosed) { cacheRequestClosed = true; cacheBody.close(); // The cache response is complete! } return -1; } sink.copyTo(cacheBody.buffer(), sink.size() - bytesRead, bytesRead); cacheBody.emitCompleteSegments(); return bytesRead; } @Override public Timeout timeout() { return source.timeout(); } @Override public void close() throws IOException { if (!cacheRequestClosed && !Util.discard(this, Transport.DISCARD_STREAM_TIMEOUT_MILLIS, MILLISECONDS)) { cacheRequestClosed = true; cacheRequest.abort(); } source.close(); } }; return response.newBuilder() .body(new RealResponseBody(response.headers(), Okio.buffer(cacheWritingSource))) .build(); } /** * Returns true if {@code cached} should be used; false if {@code network} * response should be used. */ private static boolean validate(Response cached, Response network) { if (network.code() == HTTP_NOT_MODIFIED) { return true; } // The HTTP spec says that if the network's response is older than our // cached response, we may return the cache's response. Like Chrome (but // unlike Firefox), this client prefers to return the newer response. 
Date lastModified = cached.headers().getDate("Last-Modified"); if (lastModified != null) { Date networkLastModified = network.headers().getDate("Last-Modified"); if (networkLastModified != null && networkLastModified.getTime() < lastModified.getTime()) { return true; } } return false; } /** * Combines cached headers with a network headers as defined by RFC 2616, * 13.5.3. */ private static Headers combine(Headers cachedHeaders, Headers networkHeaders) throws IOException { Headers.Builder result = new Headers.Builder(); for (int i = 0, size = cachedHeaders.size(); i < size; i++) { String fieldName = cachedHeaders.name(i); String value = cachedHeaders.value(i); if ("Warning".equalsIgnoreCase(fieldName) && value.startsWith("1")) { continue; // Drop 100-level freshness warnings. } if (!OkHeaders.isEndToEnd(fieldName) || networkHeaders.get(fieldName) == null) { result.add(fieldName, value); } } for (int i = 0, size = networkHeaders.size(); i < size; i++) { String fieldName = networkHeaders.name(i); if ("Content-Length".equalsIgnoreCase(fieldName)) { continue; // Ignore content-length headers of validating responses. } if (OkHeaders.isEndToEnd(fieldName)) { result.add(fieldName, networkHeaders.value(i)); } } return result.build(); } public void receiveHeaders(Headers headers) throws IOException { CookieHandler cookieHandler = client.getCookieHandler(); if (cookieHandler != null) { cookieHandler.put(userRequest.uri(), OkHeaders.toMultimap(headers, null)); } } /** * Figures out the HTTP request to make in response to receiving this engine's * response. This will either add authentication headers or follow redirects. * If a follow-up is either unnecessary or not applicable, this returns null. */ public Request followUpRequest() throws IOException { if (userResponse == null) throw new IllegalStateException(); Proxy selectedProxy = getRoute() != null ? 
getRoute().getProxy() : client.getProxy(); int responseCode = userResponse.code(); switch (responseCode) { case HTTP_PROXY_AUTH: if (selectedProxy.type() != Proxy.Type.HTTP) { throw new ProtocolException("Received HTTP_PROXY_AUTH (407) code while not using proxy"); } // fall-through case HTTP_UNAUTHORIZED: return OkHeaders.processAuthHeader(client.getAuthenticator(), userResponse, selectedProxy); case HTTP_PERM_REDIRECT: case HTTP_TEMP_REDIRECT: // "If the 307 or 308 status code is received in response to a request other than GET // or HEAD, the user agent MUST NOT automatically redirect the request" if (!userRequest.method().equals("GET") && !userRequest.method().equals("HEAD")) { return null; } // fall-through case HTTP_MULT_CHOICE: case HTTP_MOVED_PERM: case HTTP_MOVED_TEMP: case HTTP_SEE_OTHER: // Does the client allow redirects? if (!client.getFollowRedirects()) return null; String location = userResponse.header("Location"); if (location == null) return null; URL url = new URL(userRequest.url(), location); // Don't follow redirects to unsupported protocols. if (!url.getProtocol().equals("https") && !url.getProtocol().equals("http")) return null; // If configured, don't follow redirects between SSL and non-SSL. boolean sameProtocol = url.getProtocol().equals(userRequest.url().getProtocol()); if (!sameProtocol && !client.getFollowSslRedirects()) return null; // Redirects don't include a request body. Request.Builder requestBuilder = userRequest.newBuilder(); if (HttpMethod.permitsRequestBody(userRequest.method())) { requestBuilder.method("GET", null); requestBuilder.removeHeader("Transfer-Encoding"); requestBuilder.removeHeader("Content-Length"); requestBuilder.removeHeader("Content-Type"); } // When redirecting across hosts, drop all authentication headers. This // is potentially annoying to the application layer since they have no // way to retain them. 
if (!sameConnection(url)) { requestBuilder.removeHeader("Authorization"); } return requestBuilder.url(url).build(); default: return null; } } /** * Returns true if an HTTP request for {@code followUp} can reuse the * connection used by this engine. */ public boolean sameConnection(URL followUp) { URL url = userRequest.url(); return url.getHost().equals(followUp.getHost()) && getEffectivePort(url) == getEffectivePort(followUp) && url.getProtocol().equals(followUp.getProtocol()); } private static Address createAddress(OkHttpClient client, Request request) throws UnknownHostException { String uriHost = request.url().getHost(); if (uriHost == null || uriHost.length() == 0) { throw new UnknownHostException(request.url().toString()); } SSLSocketFactory sslSocketFactory = null; HostnameVerifier hostnameVerifier = null; CertificatePinner certificatePinner = null; if (request.isHttps()) { sslSocketFactory = client.getSslSocketFactory(); hostnameVerifier = client.getHostnameVerifier(); certificatePinner = client.getCertificatePinner(); } return new Address(uriHost, getEffectivePort(request.url()), client.getSocketFactory(), sslSocketFactory, hostnameVerifier, certificatePinner, client.getAuthenticator(), client.getProxy(), client.getProtocols(), client.getConnectionSpecs(), client.getProxySelector()); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import java.nio.ByteBuffer;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationMasterProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceTypeInfo;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SchedulerResourceTypes;
import org.apache.hadoop.yarn.util.Records;

/**
 * The response sent by the {@code ResourceManager} to a new
 * {@code ApplicationMaster} on registration.
 * <p>
 * The response contains critical details such as:
 * <ul>
 *   <li>Maximum capability for allocated resources in the cluster.</li>
 *   <li>{@code ApplicationACL}s for the application.</li>
 *   <li>ClientToAMToken master key.</li>
 * </ul>
 *
 * @see ApplicationMasterProtocol#registerApplicationMaster(RegisterApplicationMasterRequest)
 */
@Public
@Stable
public abstract class RegisterApplicationMasterResponse {

  /**
   * Creates a populated response record.
   *
   * <p>Note: {@code minCapability} is accepted but not stored anywhere on the
   * response — only the maximum capability is recorded.
   */
  @Private
  @Unstable
  public static RegisterApplicationMasterResponse newInstance(
      Resource minCapability, Resource maxCapability,
      Map<ApplicationAccessType, String> acls, ByteBuffer key,
      List<Container> containersFromPreviousAttempt, String queue,
      List<NMToken> nmTokensFromPreviousAttempts) {
    RegisterApplicationMasterResponse response =
        Records.newRecord(RegisterApplicationMasterResponse.class);
    response.setMaximumResourceCapability(maxCapability);
    response.setApplicationACLs(acls);
    response.setClientToAMTokenMasterKey(key);
    response.setContainersFromPreviousAttempts(containersFromPreviousAttempt);
    response.setNMTokensFromPreviousAttempts(nmTokensFromPreviousAttempts);
    response.setQueue(queue);
    return response;
  }

  /**
   * Get the maximum capability for any {@link Resource} allocated by the
   * <code>ResourceManager</code> in the cluster.
   * @return maximum capability of allocated resources in the cluster
   */
  @Public
  @Stable
  public abstract Resource getMaximumResourceCapability();

  @Private
  @Unstable
  public abstract void setMaximumResourceCapability(Resource capability);

  /**
   * Get the <code>ApplicationACL</code>s for the application.
   * @return all the <code>ApplicationACL</code>s
   */
  @Public
  @Stable
  public abstract Map<ApplicationAccessType, String> getApplicationACLs();

  /**
   * Set the <code>ApplicationACL</code>s for the application.
   * @param acls the <code>ApplicationACL</code>s for the application
   */
  @Private
  @Unstable
  public abstract void setApplicationACLs(Map<ApplicationAccessType, String> acls);

  /**
   * <p>Get ClientToAMToken master key.</p>
   * <p>The ClientToAMToken master key is sent to <code>ApplicationMaster</code>
   * by <code>ResourceManager</code> via {@link RegisterApplicationMasterResponse}
   * , used to verify corresponding ClientToAMToken.</p>
   * @return ClientToAMToken master key
   */
  @Public
  @Stable
  public abstract ByteBuffer getClientToAMTokenMasterKey();

  /**
   * Set ClientToAMToken master key.
   * @param key the ClientToAMToken master key
   */
  @Public
  @Stable
  public abstract void setClientToAMTokenMasterKey(ByteBuffer key);

  /**
   * <p>Get the queue that the application was placed in.</p>
   * @return the queue that the application was placed in.
   */
  @Public
  @Stable
  public abstract String getQueue();

  /**
   * <p>Set the queue that the application was placed in.</p>
   * @param queue the queue that the application was placed in
   */
  @Public
  @Stable
  public abstract void setQueue(String queue);

  /**
   * <p>
   * Get the list of running containers as viewed by
   * <code>ResourceManager</code> from previous application attempts.
   * </p>
   *
   * @return the list of running containers as viewed by
   *         <code>ResourceManager</code> from previous application attempts
   * @see RegisterApplicationMasterResponse#getNMTokensFromPreviousAttempts()
   */
  @Public
  @Unstable
  public abstract List<Container> getContainersFromPreviousAttempts();

  /**
   * Set the list of running containers as viewed by
   * <code>ResourceManager</code> from previous application attempts.
   *
   * @param containersFromPreviousAttempt
   *          the list of running containers as viewed by
   *          <code>ResourceManager</code> from previous application attempts.
   */
  @Private
  @Unstable
  public abstract void setContainersFromPreviousAttempts(
      List<Container> containersFromPreviousAttempt);

  /**
   * Get the list of NMTokens for communicating with the NMs where the
   * containers of previous application attempts are running.
   *
   * @return the list of NMTokens for communicating with the NMs where the
   *         containers of previous application attempts are running.
   *
   * @see RegisterApplicationMasterResponse#getContainersFromPreviousAttempts()
   */
  @Public
  @Stable
  public abstract List<NMToken> getNMTokensFromPreviousAttempts();

  /**
   * Set the list of NMTokens for communicating with the NMs where the
   * containers of previous application attempts are running.
   *
   * @param nmTokens
   *          the list of NMTokens for communicating with the NMs where the
   *          containers of previous application attempts are running.
   */
  @Private
  @Unstable
  public abstract void setNMTokensFromPreviousAttempts(List<NMToken> nmTokens);

  /**
   * Get a set of the resource types considered by the scheduler.
   *
   * @return the set of resource types considered by the scheduler
   */
  @Public
  @Unstable
  public abstract EnumSet<SchedulerResourceTypes> getSchedulerResourceTypes();

  /**
   * Set the resource types used by the scheduler.
   *
   * @param types
   *          a set of the resource types that the scheduler considers during
   *          scheduling
   */
  @Private
  @Unstable
  public abstract void setSchedulerResourceTypes(
      EnumSet<SchedulerResourceTypes> types);

  /**
   * Get list of supported resource profiles from RM.
   *
   * @return a map of resource profiles and its capabilities.
   */
  @Public
  @Unstable
  public abstract Map<String, Resource> getResourceProfiles();

  /**
   * Set supported resource profiles for RM.
   *
   * @param profiles
   *          a map of resource profiles with its capabilities.
   */
  @Private
  @Unstable
  public abstract void setResourceProfiles(Map<String, Resource> profiles);

  /**
   * Get available resource types supported by RM.
   *
   * @return the list of resource types supported by RM
   */
  @Public
  @Unstable
  public abstract List<ResourceTypeInfo> getResourceTypes();

  /**
   * Set the resource types used by RM.
   *
   * @param types
   *          a set of the resource types supported by RM.
   */
  @Private
  @Unstable
  public abstract void setResourceTypes(List<ResourceTypeInfo> types);
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.osconfig.v1.model;

/**
 * Operating system information for the VM.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the OS Config API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class InventoryOsInfo extends com.google.api.client.json.GenericJson {

  /**
   * The system architecture of the operating system.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String architecture;

  /**
   * The VM hostname.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String hostname;

  /**
   * The kernel release of the operating system.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kernelRelease;

  /**
   * The kernel version of the operating system.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kernelVersion;

  /**
   * The operating system long name. For example 'Debian GNU/Linux 9' or 'Microsoft Windows Server
   * 2019 Datacenter'.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String longName;

  /**
   * The current version of the OS Config agent running on the VM.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String osconfigAgentVersion;

  /**
   * The operating system short name. For example, 'windows' or 'debian'.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String shortName;

  /**
   * The version of the operating system.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String version;

  /**
   * The system architecture of the operating system.
   * @return value or {@code null} for none
   */
  public java.lang.String getArchitecture() {
    return architecture;
  }

  /**
   * The system architecture of the operating system.
   * @param architecture architecture or {@code null} for none
   */
  public InventoryOsInfo setArchitecture(java.lang.String architecture) {
    this.architecture = architecture;
    return this;
  }

  /**
   * The VM hostname.
   * @return value or {@code null} for none
   */
  public java.lang.String getHostname() {
    return hostname;
  }

  /**
   * The VM hostname.
   * @param hostname hostname or {@code null} for none
   */
  public InventoryOsInfo setHostname(java.lang.String hostname) {
    this.hostname = hostname;
    return this;
  }

  /**
   * The kernel release of the operating system.
   * @return value or {@code null} for none
   */
  public java.lang.String getKernelRelease() {
    return kernelRelease;
  }

  /**
   * The kernel release of the operating system.
   * @param kernelRelease kernelRelease or {@code null} for none
   */
  public InventoryOsInfo setKernelRelease(java.lang.String kernelRelease) {
    this.kernelRelease = kernelRelease;
    return this;
  }

  /**
   * The kernel version of the operating system.
   * @return value or {@code null} for none
   */
  public java.lang.String getKernelVersion() {
    return kernelVersion;
  }

  /**
   * The kernel version of the operating system.
   * @param kernelVersion kernelVersion or {@code null} for none
   */
  public InventoryOsInfo setKernelVersion(java.lang.String kernelVersion) {
    this.kernelVersion = kernelVersion;
    return this;
  }

  /**
   * The operating system long name. For example 'Debian GNU/Linux 9' or 'Microsoft Windows Server
   * 2019 Datacenter'.
   * @return value or {@code null} for none
   */
  public java.lang.String getLongName() {
    return longName;
  }

  /**
   * The operating system long name. For example 'Debian GNU/Linux 9' or 'Microsoft Windows Server
   * 2019 Datacenter'.
   * @param longName longName or {@code null} for none
   */
  public InventoryOsInfo setLongName(java.lang.String longName) {
    this.longName = longName;
    return this;
  }

  /**
   * The current version of the OS Config agent running on the VM.
   * @return value or {@code null} for none
   */
  public java.lang.String getOsconfigAgentVersion() {
    return osconfigAgentVersion;
  }

  /**
   * The current version of the OS Config agent running on the VM.
   * @param osconfigAgentVersion osconfigAgentVersion or {@code null} for none
   */
  public InventoryOsInfo setOsconfigAgentVersion(java.lang.String osconfigAgentVersion) {
    this.osconfigAgentVersion = osconfigAgentVersion;
    return this;
  }

  /**
   * The operating system short name. For example, 'windows' or 'debian'.
   * @return value or {@code null} for none
   */
  public java.lang.String getShortName() {
    return shortName;
  }

  /**
   * The operating system short name. For example, 'windows' or 'debian'.
   * @param shortName shortName or {@code null} for none
   */
  public InventoryOsInfo setShortName(java.lang.String shortName) {
    this.shortName = shortName;
    return this;
  }

  /**
   * The version of the operating system.
   * @return value or {@code null} for none
   */
  public java.lang.String getVersion() {
    return version;
  }

  /**
   * The version of the operating system.
   * @param version version or {@code null} for none
   */
  public InventoryOsInfo setVersion(java.lang.String version) {
    this.version = version;
    return this;
  }

  @Override
  public InventoryOsInfo set(String fieldName, Object value) {
    return (InventoryOsInfo) super.set(fieldName, value);
  }

  @Override
  public InventoryOsInfo clone() {
    return (InventoryOsInfo) super.clone();
  }

}
/* * Copyright 2012-2017 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.actuate.autoconfigure.endpoint; import org.junit.Test; import org.springframework.boot.actuate.endpoint.DefaultEnablement; import org.springframework.boot.actuate.endpoint.EndpointExposure; import org.springframework.boot.test.util.TestPropertyValues; import org.springframework.mock.env.MockEnvironment; import org.springframework.util.ObjectUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link EndpointEnablementProvider}. 
 * @author Stephane Nicoll
 */
public class EndpointEnablementProviderTests {

    // Resolution order exercised by these tests, from most to least specific:
    //   endpoints.<id>.<tech>.enabled  >  endpoints.<id>.enabled
    //   >  endpoints.default.<tech>.enabled  >  endpoints.default.enabled
    //   >  the endpoint's declared DefaultEnablement.

    // --- DefaultEnablement.DISABLED: only an endpoint-specific property may re-enable ---

    @Test
    public void defaultEnablementDisabled() {
        // No properties set: the declared DISABLED default applies.
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED);
        validate(enablement, false, "endpoint 'foo' is disabled by default");
    }

    @Test
    public void defaultEnablementDisabledWithGeneralEnablement() {
        // The global default cannot override an explicit DISABLED declaration.
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED,
                "endpoints.default.enabled=true");
        validate(enablement, false, "endpoint 'foo' is disabled by default");
    }

    @Test
    public void defaultEnablementDisabledWithGeneralTechEnablement() {
        // Nor can the global per-technology default.
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED,
                EndpointExposure.WEB, "endpoints.default.web.enabled=true");
        validate(enablement, false, "endpoint 'foo' (web) is disabled by default");
    }

    @Test
    public void defaultEnablementDisabledWithOverride() {
        // An endpoint-specific property does override the declaration.
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED,
                "endpoints.foo.enabled=true");
        validate(enablement, true, "found property endpoints.foo.enabled");
    }

    @Test
    public void defaultEnablementDisabledWithTechOverride() {
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED,
                EndpointExposure.WEB, "endpoints.foo.web.enabled=true");
        validate(enablement, true, "found property endpoints.foo.web.enabled");
    }

    @Test
    public void defaultEnablementDisabledWithIrrelevantTechOverride() {
        // A jmx-scoped property is ignored when resolving the web exposure.
        EndpointEnablement enablement = getEndpointEnablement("foo", DefaultEnablement.DISABLED,
                EndpointExposure.WEB, "endpoints.foo.jmx.enabled=true");
        validate(enablement, false, "endpoint 'foo' (web) is disabled by default");
    }

    // --- DefaultEnablement.ENABLED: mirror image of the DISABLED cases above ---

    @Test
    public void defaultEnablementEnabled() {
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED);
        validate(enablement, true, "endpoint 'bar' is enabled by default");
    }

    @Test
    public void defaultEnablementEnabledWithGeneralDisablement() {
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED,
                "endpoints.default.enabled=false");
        validate(enablement, true, "endpoint 'bar' is enabled by default");
    }

    @Test
    public void defaultEnablementEnabledWithGeneralTechDisablement() {
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED,
                EndpointExposure.JMX, "endpoints.default.jmx.enabled=false");
        validate(enablement, true, "endpoint 'bar' (jmx) is enabled by default");
    }

    @Test
    public void defaultEnablementEnabledWithOverride() {
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED,
                "endpoints.bar.enabled=false");
        validate(enablement, false, "found property endpoints.bar.enabled");
    }

    @Test
    public void defaultEnablementEnabledWithTechOverride() {
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED,
                EndpointExposure.JMX, "endpoints.bar.jmx.enabled=false");
        validate(enablement, false, "found property endpoints.bar.jmx.enabled");
    }

    @Test
    public void defaultEnablementEnabledWithIrrelevantTechOverride() {
        // A web-scoped property is ignored when resolving the jmx exposure.
        EndpointEnablement enablement = getEndpointEnablement("bar", DefaultEnablement.ENABLED,
                EndpointExposure.JMX, "endpoints.bar.web.enabled=false");
        validate(enablement, true, "endpoint 'bar' (jmx) is enabled by default");
    }

    // --- DefaultEnablement.NEUTRAL: defaults differ per exposure and every property level applies ---

    @Test
    public void defaultEnablementNeutral() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL);
        validate(enablement, true, "endpoint 'biz' is enabled (default)");
    }

    @Test
    public void defaultEnablementNeutralWeb() {
        // Neutral web endpoints are disabled out of the box.
        // NOTE(review): the expected reason cites 'default' rather than 'biz' — presumably the
        // provider reports the applicable *default* rule here; confirm against the provider impl.
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.WEB);
        validate(enablement, false, "endpoint 'default' (web) is disabled by default");
    }

    @Test
    public void defaultEnablementNeutralJmx() {
        // Neutral jmx endpoints are enabled out of the box.
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX);
        validate(enablement, true, "endpoint 'biz' (jmx) is enabled (default for jmx endpoints)");
    }

    @Test
    public void defaultEnablementNeutralWithGeneralDisablement() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                "endpoints.default.enabled=false");
        validate(enablement, false, "found property endpoints.default.enabled");
    }

    @Test
    public void defaultEnablementNeutralWebWithTechDisablement() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.jmx.enabled=false");
        validate(enablement, false, "found property endpoints.default.jmx.enabled");
    }

    @Test
    public void defaultEnablementNeutralTechTakesPrecedence() {
        // The tech-scoped default beats the general default.
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.enabled=true",
                "endpoints.default.jmx.enabled=false");
        validate(enablement, false, "found property endpoints.default.jmx.enabled");
    }

    @Test
    public void defaultEnablementNeutralWebWithTechEnablement() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.WEB, "endpoints.default.web.enabled=true");
        validate(enablement, true, "found property endpoints.default.web.enabled");
    }

    @Test
    public void defaultEnablementNeutralWebWithUnrelatedTechDisablement() {
        // A web-scoped default property has no effect on the jmx exposure.
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.web.enabled=false");
        validate(enablement, true, "endpoint 'biz' (jmx) is enabled (default for jmx endpoints)");
    }

    @Test
    public void defaultEnablementNeutralWithOverride() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                "endpoints.biz.enabled=false");
        validate(enablement, false, "found property endpoints.biz.enabled");
    }

    @Test
    public void defaultEnablementNeutralWebWithOverride() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.WEB, "endpoints.biz.web.enabled=true");
        validate(enablement, true, "found property endpoints.biz.web.enabled");
    }

    @Test
    public void defaultEnablementNeutralJmxWithOverride() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.biz.jmx.enabled=false");
        validate(enablement, false, "found property endpoints.biz.jmx.enabled");
    }

    @Test
    public void defaultEnablementNeutralTechTakesPrecedenceOnEverything() {
        // endpoint+tech beats endpoint, default+tech and default combined.
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.enabled=false",
                "endpoints.default.jmx.enabled=false", "endpoints.biz.enabled=false",
                "endpoints.biz.jmx.enabled=true");
        validate(enablement, true, "found property endpoints.biz.jmx.enabled");
    }

    @Test
    public void defaultEnablementNeutralSpecificTakesPrecedenceOnDefaults() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.enabled=false",
                "endpoints.default.jmx.enabled=false", "endpoints.biz.enabled=true");
        validate(enablement, true, "found property endpoints.biz.enabled");
    }

    @Test
    public void defaultEnablementNeutralDefaultTechTakesPrecedenceOnGeneralDefault() {
        EndpointEnablement enablement = getEndpointEnablement("biz", DefaultEnablement.NEUTRAL,
                EndpointExposure.JMX, "endpoints.default.enabled=false",
                "endpoints.default.jmx.enabled=true");
        validate(enablement, true, "found property endpoints.default.jmx.enabled");
    }

    /**
     * Resolves enablement with no exposure restriction.
     */
    private EndpointEnablement getEndpointEnablement(String id,
            DefaultEnablement enabledByDefault, String... environment) {
        return getEndpointEnablement(id, enabledByDefault, null, environment);
    }

    /**
     * Builds a {@link MockEnvironment} from the given {@code key=value} pairs and asks the
     * provider for the enablement of endpoint {@code id}, optionally scoped to an exposure.
     */
    private EndpointEnablement getEndpointEnablement(String id,
            DefaultEnablement enabledByDefault, EndpointExposure exposure, String... environment) {
        MockEnvironment env = new MockEnvironment();
        TestPropertyValues.of(environment).applyTo(env);
        EndpointEnablementProvider provider = new EndpointEnablementProvider(env);
        if (exposure != null) {
            return provider.getEndpointEnablement(id, enabledByDefault, exposure);
        }
        return provider.getEndpointEnablement(id, enabledByDefault);
    }

    /**
     * Asserts the resolved enabled flag and, when messages are supplied, that the reason
     * contains every expected fragment.
     */
    private void validate(EndpointEnablement enablement, boolean enabled, String... messages) {
        assertThat(enablement).isNotNull();
        assertThat(enablement.isEnabled()).isEqualTo(enabled);
        if (!ObjectUtils.isEmpty(messages)) {
            assertThat(enablement.getReason()).contains(messages);
        }
    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.groovyx;

import groovy.lang.GroovyShell;
import groovy.lang.Script;
import org.apache.nifi.annotation.behavior.DynamicProperty;
import org.apache.nifi.annotation.behavior.EventDriven;
import org.apache.nifi.annotation.behavior.InputRequirement;
import org.apache.nifi.annotation.behavior.Restricted;
import org.apache.nifi.annotation.behavior.Restriction;
import org.apache.nifi.annotation.documentation.CapabilityDescription;
import org.apache.nifi.annotation.documentation.SeeAlso;
import org.apache.nifi.annotation.documentation.Tags;
import org.apache.nifi.annotation.lifecycle.OnScheduled;
import org.apache.nifi.annotation.lifecycle.OnStopped;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.RequiredPermission;
import org.apache.nifi.components.ValidationContext;
import org.apache.nifi.components.ValidationResult;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.dbcp.DBCPService;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessorInitializationContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.groovyx.flow.GroovyProcessSessionWrap;
import org.apache.nifi.processors.groovyx.sql.OSql;
import org.apache.nifi.processors.groovyx.util.Files;
import org.apache.nifi.processors.groovyx.util.Validators;
import org.codehaus.groovy.control.CompilerConfiguration;
import org.codehaus.groovy.runtime.ResourceGroovyMethods;
import org.codehaus.groovy.runtime.StackTraceUtils;

import java.io.File;
import java.lang.reflect.Method;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

@EventDriven
@InputRequirement(InputRequirement.Requirement.INPUT_ALLOWED)
@Tags({"script", "groovy", "groovyx"})
@CapabilityDescription(
        "Experimental Extended Groovy script processor. The script is responsible for "
                + "handling the incoming flow file (transfer to SUCCESS or remove, e.g.) as well as any flow files created by "
                + "the script. If the handling is incomplete or incorrect, the session will be rolled back.")
@Restricted(
        restrictions = {
                @Restriction(
                        requiredPermission = RequiredPermission.EXECUTE_CODE,
                        explanation = "Provides operator the ability to execute arbitrary code assuming all permissions that NiFi has.")
        }
)
@SeeAlso(classNames={"org.apache.nifi.processors.script.ExecuteScript"})
@DynamicProperty(name = "A script engine property to update", value = "The value to set it to", expressionLanguageScope = ExpressionLanguageScope.FLOWFILE_ATTRIBUTES,
        description = "Updates a script engine property specified by the Dynamic Property's key with the value "
                + "specified by the Dynamic Property's value. Use `CTL.` to access any controller services.")
// Runs a Groovy script against each session. The compiled script is cached per processor
// instance and recompiled when the script file changes or a property is modified.
public class ExecuteGroovyScript extends AbstractProcessor {
    // NiFi expression resolved at schedule/validate time to locate extra Groovy classes.
    public static final String GROOVY_CLASSPATH = "${groovy.classes.path}";

    // Imports prepended to every user script so common NiFi types are available without
    // explicit imports. Note: because this is prepended on one line, script line numbers
    // in compile errors are unaffected, but column offsets of line 1 shift.
    private static final String PRELOADS = "import org.apache.nifi.components.*;"
            + "import org.apache.nifi.flowfile.FlowFile;"
            + "import org.apache.nifi.processor.*;"
            + "import org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult;"
            + "import org.apache.nifi.processor.exception.*;"
            + "import org.apache.nifi.processor.io.*;"
            + "import org.apache.nifi.processor.util.*;"
            + "import org.apache.nifi.processors.script.*;"
            + "import org.apache.nifi.logging.ComponentLog;";

    public static final PropertyDescriptor SCRIPT_FILE = new PropertyDescriptor.Builder()
            .name("groovyx-script-file")
            .displayName("Script File")
            .required(false)
            .description("Path to script file to execute. Only one of Script File or Script Body may be used")
            .addValidator(Validators.createFileExistsAndReadableValidator())
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final PropertyDescriptor SCRIPT_BODY = new PropertyDescriptor.Builder()
            .name("groovyx-script-body")
            .displayName("Script Body")
            .required(false)
            .description("Body of script to execute. Only one of Script File or Script Body may be used")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .build();

    // Index 0 is the default strategy; index 1 is matched in onTrigger.
    // NOTE(review): a public static mutable array could be reassigned/mutated by callers;
    // consider making it final (keeping the field public for compatibility).
    public static String[] VALID_FAIL_STRATEGY = {"rollback", "transfer to failure"};
    public static final PropertyDescriptor FAIL_STRATEGY = new PropertyDescriptor.Builder()
            .name("groovyx-failure-strategy")
            .displayName("Failure strategy")
            .description("What to do with unhandled exceptions. If you want to manage exception by code then keep the default value `rollback`."
                    + " If `transfer to failure` selected and unhandled exception occurred then all flowFiles received from incoming queues in this session"
                    + " will be transferred to `failure` relationship with additional attributes set: ERROR_MESSAGE and ERROR_STACKTRACE."
                    + " If `rollback` selected and unhandled exception occurred then all flowFiles received from incoming queues will be penalized and returned."
                    + " If the processor has no incoming connections then this parameter has no effect."
            )
            .required(true).expressionLanguageSupported(ExpressionLanguageScope.NONE)
            .allowableValues(VALID_FAIL_STRATEGY)
            .defaultValue(VALID_FAIL_STRATEGY[0])
            .build();

    public static final PropertyDescriptor ADD_CLASSPATH = new PropertyDescriptor.Builder()
            .name("groovyx-additional-classpath")
            .displayName("Additional classpath")
            .required(false)
            .description("Classpath list separated by semicolon. You can use masks like `*`, `*.jar` in file name.")
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
            .build();

    public static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").description("FlowFiles that were successfully processed").build();
    public static final Relationship REL_FAILURE = new Relationship.Builder().name("failure").description("FlowFiles that failed to be processed").build();

    private List<PropertyDescriptor> descriptors;
    private Set<Relationship> relationships;

    //parameters evaluated on Start or on Validate
    File scriptFile = null;  //SCRIPT_FILE
    String scriptBody = null;  //SCRIPT_BODY
    String addClasspath = null;  //ADD_CLASSPATH
    String groovyClasspath = null;  //evaluated from GROOVY_CLASSPATH = ${groovy.classes.path} global property

    //compiled script (volatile: read/written from framework and trigger threads)
    volatile GroovyShell shell = null; //new GroovyShell();
    volatile Class<Script> compiled = null;  //compiled script
    volatile long scriptLastModified = 0;  //last scriptFile modification to check if recompile required

    /**
     * Registers the supported property descriptors and the success/failure relationships.
     */
    @Override
    protected void init(final ProcessorInitializationContext context) {
        List<PropertyDescriptor> descriptors = new ArrayList<>();
        descriptors.add(SCRIPT_FILE);
        descriptors.add(SCRIPT_BODY);
        descriptors.add(FAIL_STRATEGY);
        descriptors.add(ADD_CLASSPATH);
        this.descriptors = Collections.unmodifiableList(descriptors);

        HashSet<Relationship> relationshipSet = new HashSet<>();
        relationshipSet.add(REL_SUCCESS);
        relationshipSet.add(REL_FAILURE);
        relationships = Collections.unmodifiableSet(relationshipSet);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    @Override
    public final List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return descriptors;
    }

    /** Converts a property value to a File, treating null/empty as "not set". */
    private File asFile(String f) {
        if (f == null || f.length() == 0) {
            return null;
        }
        return new File(f);
    }

    /**
     * Invokes a static method of the compiled script by name, if it exists.
     * Tries a (ProcessContext) signature first, then (Object); silently does
     * nothing when the script declares neither.
     */
    private void callScriptStatic(String method, final ProcessContext context) throws IllegalAccessException, java.lang.reflect.InvocationTargetException {
        if (compiled != null) {
            Method m = null;
            try {
                m = compiled.getDeclaredMethod(method, ProcessContext.class);
            } catch (NoSuchMethodException e) {
                // The method will not be invoked if it does not exist
            }
            if (m == null) {
                try {
                    m = compiled.getDeclaredMethod(method, Object.class);
                } catch (NoSuchMethodException e) {
                    // The method will not be invoked if it does not exist
                }
            }
            if (m != null) {
                m.invoke(null, context);
            }
        }
    }

    /**
     * Let's do validation by script compile at this point.
     * Evaluates the script-related properties and reports any compilation failure
     * as a validation error. NOTE(review): no @Override here — presumably intentional
     * to keep the AbstractProcessor hook; confirm the superclass method signature.
     *
     * @param context provides a mechanism for obtaining externally managed values, such as property values and supplies convenience methods for operating on those values
     * @return Collection of ValidationResult objects that will be added to any other validation findings - may be null
     */
    protected Collection<ValidationResult> customValidate(final ValidationContext context) {
        this.scriptFile = asFile(context.getProperty(SCRIPT_FILE).evaluateAttributeExpressions().getValue());  //SCRIPT_FILE
        this.scriptBody = context.getProperty(SCRIPT_BODY).getValue();  //SCRIPT_BODY
        this.addClasspath = context.getProperty(ADD_CLASSPATH).evaluateAttributeExpressions().getValue();  //ADD_CLASSPATH
        this.groovyClasspath = context.newPropertyValue(GROOVY_CLASSPATH).evaluateAttributeExpressions().getValue();  //evaluated from ${groovy.classes.path} global property

        final Collection<ValidationResult> results = new HashSet<>();
        try {
            getGroovyScript();
        } catch (Throwable t) {
            results.add(new ValidationResult.Builder().subject("GroovyScript").input(this.scriptFile != null ? this.scriptFile.toString() : null).valid(false).explanation(t.toString()).build());
        }
        return results;
    }

    /**
     * Hook method allowing subclasses to eagerly react to a configuration
     * change for the given property descriptor. As an alternative to using this
     * method a processor may simply get the latest value whenever it needs it
     * and if necessary lazily evaluate it.
     * Here: drops the cached shell/compiled script so the next run recompiles.
     *
     * @param descriptor of the modified property
     * @param oldValue   non-null property value (previous)
     * @param newValue   the new property value or if null indicates the property was removed
     */
    @Override
    public void onPropertyModified(final PropertyDescriptor descriptor, final String oldValue, final String newValue) {
        this.shell = null;
        this.compiled = null;
        this.scriptLastModified = 0;
    }

    /**
     * Performs setup operations when the processor is scheduled to run. This includes evaluating the processor's
     * properties, as well as reloading the script (from file or the "Script Body" property)
     * and calling the script's optional static {@code onStart(ProcessContext)} hook.
     *
     * @param context the context in which to perform the setup operations
     */
    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        this.scriptFile = asFile(context.getProperty(SCRIPT_FILE).evaluateAttributeExpressions().getValue());  //SCRIPT_FILE
        this.scriptBody = context.getProperty(SCRIPT_BODY).getValue();  //SCRIPT_BODY
        this.addClasspath = context.getProperty(ADD_CLASSPATH).evaluateAttributeExpressions().getValue();  //ADD_CLASSPATH
        this.groovyClasspath = context.newPropertyValue(GROOVY_CLASSPATH).evaluateAttributeExpressions().getValue();  //evaluated from ${groovy.classes.path} global property
        try {
            //compile if needed
            getGroovyScript();
        } catch (Throwable t) {
            getLogger().error("Load script failed: " + t);
            throw new ProcessException("Load script failed: " + t, t);
        }
        try {
            callScriptStatic("onStart", context);
        } catch (Throwable t) {
            getLogger().error("onStart failed: " + t);
            throw new ProcessException("onStart failed: " + t, t);
        }
    }

    /**
     * Calls the script's optional static {@code onStop(ProcessContext)} hook when the
     * processor is stopped.
     */
    @OnStopped
    public void onStopped(final ProcessContext context) {
        try {
            callScriptStatic("onStop", context);
        } catch (Throwable t) {
            throw new ProcessException("Failed to finalize groovy script:\n" + t, t);
        }
        //reset of compiled script not needed here because we did it onPropertyModified
    }

    /**
     * Returns a Script instance, (re)compiling it when required.
     * Used in validation and processing. Exactly one of Script File / Script Body must
     * be set. The shell and its classloader are built lazily; a changed script file
     * forces recompilation once the file has been stable for >3s (avoids reading a
     * file that is still being written).
     * NOTE(review): Class.newInstance() is deprecated; getDeclaredConstructor().newInstance()
     * would be the modern equivalent. Also sets the thread context classloader as a side effect.
     */
    // used in validation and processing
    @SuppressWarnings("unchecked")
    Script getGroovyScript() throws Throwable {
        GroovyMethods.init();
        if (scriptBody != null && scriptFile != null) {
            throw new ProcessException("Only one parameter accepted: `" + SCRIPT_BODY.getDisplayName() + "` or `" + SCRIPT_FILE.getDisplayName() + "`");
        }
        if (scriptBody == null && scriptFile == null) {
            throw new ProcessException("At least one parameter required: `" + SCRIPT_BODY.getDisplayName() + "` or `" + SCRIPT_FILE.getDisplayName() + "`");
        }
        if (shell == null) {
            CompilerConfiguration conf = new CompilerConfiguration();
            conf.setDebug(true);
            shell = new GroovyShell(conf);
            if (addClasspath != null && addClasspath.length() > 0) {
                for (File fcp : Files.listPathsFiles(addClasspath)) {
                    if (!fcp.exists()) {
                        throw new ProcessException("Path not found `" + fcp + "` for `" + ADD_CLASSPATH.getDisplayName() + "`");
                    }
                    shell.getClassLoader().addClasspath(fcp.toString());
                }
            }
            //try to add classpath with groovy classes
            if (groovyClasspath != null && groovyClasspath.length() > 0) {
                shell.getClassLoader().addClasspath(groovyClasspath);
            }
        }
        Script script = null;
        if (compiled != null && scriptFile != null && scriptLastModified != scriptFile.lastModified() && System.currentTimeMillis() - scriptFile.lastModified() > 3000) {
            //force recompile if script file has been changed
            compiled = null;
        }
        if (compiled == null) {
            String scriptName;
            String scriptText;
            if (scriptFile != null) {
                scriptName = scriptFile.getName();
                scriptLastModified = scriptFile.lastModified();
                scriptText = ResourceGroovyMethods.getText(scriptFile, "UTF-8");
            } else {
                scriptName = "Script" + Long.toHexString(scriptBody.hashCode()) + ".groovy";
                scriptText = scriptBody;
            }
            script = shell.parse(PRELOADS + scriptText, scriptName);
            compiled = (Class<Script>) script.getClass();
        }
        if (script == null) {
            script = compiled.newInstance();
        }
        Thread.currentThread().setContextClassLoader(shell.getClassLoader());
        return script;
    }

    /**
     * init SQL variables from DBCP services: replaces each DBCPService value in the map
     * with an open OSql wrapper, attempting to switch the connection to autocommit=false.
     */
    @SuppressWarnings("unchecked")
    private void onInitSQL(HashMap SQL) throws SQLException {
        for (Map.Entry e : (Set<Map.Entry>) SQL.entrySet()) {
            DBCPService s = (DBCPService) e.getValue();
            OSql sql = new OSql(s.getConnection());
            //try to set autocommit to false
            try {
                if (sql.getConnection().getAutoCommit()) {
                    sql.getConnection().setAutoCommit(false);
                }
            } catch (Throwable ei) {
                getLogger().warn("Failed to set autocommit=false for `" + e.getKey() + "`", ei);
            }
            e.setValue(sql);
        }
    }

    /**
     * before commit SQL services: commits every connection that is not in autocommit mode.
     */
    @SuppressWarnings("unchecked")
    private void onCommitSQL(HashMap SQL) throws SQLException {
        for (Map.Entry e : (Set<Map.Entry>) SQL.entrySet()) {
            OSql sql = (OSql) e.getValue();
            if (!sql.getConnection().getAutoCommit()) {
                sql.commit();
            }
        }
    }

    /**
     * finalize SQL services. no exceptions should be thrown.
     * Restores autocommit=true (the NiFi default) and closes each connection, best effort.
     */
    @SuppressWarnings("unchecked")
    private void onFinitSQL(HashMap SQL) {
        for (Map.Entry e : (Set<Map.Entry>) SQL.entrySet()) {
            OSql sql = (OSql) e.getValue();
            try {
                if (!sql.getConnection().getAutoCommit()) {
                    sql.getConnection().setAutoCommit(true); //default autocommit value in nifi
                }
            } catch (Throwable ei) {
                getLogger().warn("Failed to set autocommit=true for `" + e.getKey() + "`", ei);
            }
            try {
                sql.close();
                sql = null;
            } catch (Throwable ei) {
                // Nothing to do
            }
        }
    }

    /**
     * exception SQL services: rolls back every non-autocommit connection, best effort.
     */
    @SuppressWarnings("unchecked")
    private void onFailSQL(HashMap SQL) {
        for (Map.Entry e : (Set<Map.Entry>) SQL.entrySet()) {
            OSql sql = (OSql) e.getValue();
            try {
                if (!sql.getConnection().getAutoCommit()) {
                    sql.rollback();
                }
            } catch (Throwable ei) {
                //the rollback error is usually not important, rather it is the DML error that is really important
            }
        }
    }

    /**
     * Runs the compiled script once with session/context/log/CTL/SQL bound as variables.
     * On any throwable the SQL connections are rolled back and the received flow files
     * are either sent to failure (with error attributes) or the session is rolled back,
     * depending on the configured failure strategy.
     */
    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession _session) throws ProcessException {
        boolean toFailureOnError = VALID_FAIL_STRATEGY[1].equals(context.getProperty(FAIL_STRATEGY).getValue());
        //create wrapped session to control list of newly created and files got from this session.
        //so transfer original input to failure will be possible
        GroovyProcessSessionWrap session = new GroovyProcessSessionWrap(_session, toFailureOnError);

        HashMap CTL = new AccessMap("CTL");
        HashMap SQL = new AccessMap("SQL");

        try {
            Script script = getGroovyScript(); //compilation must be moved to validation

            Map bindings = script.getBinding().getVariables();

            bindings.clear();

            // Find the user-added properties and bind them for the script
            for (Map.Entry<PropertyDescriptor, String> property : context.getProperties().entrySet()) {
                if (property.getKey().isDynamic()) {
                    if (property.getKey().getName().startsWith("CTL.")) {
                        //get controller service
                        ControllerService ctl = context.getProperty(property.getKey()).asControllerService(ControllerService.class);
                        CTL.put(property.getKey().getName().substring(4), ctl);
                    } else if (property.getKey().getName().startsWith("SQL.")) {
                        DBCPService dbcp = context.getProperty(property.getKey()).asControllerService(DBCPService.class);
                        SQL.put(property.getKey().getName().substring(4), dbcp);
                    } else {
                        // Add the dynamic property bound to its full PropertyValue to the script engine
                        if (property.getValue() != null) {
                            bindings.put(property.getKey().getName(), context.getProperty(property.getKey()));
                        }
                    }
                }
            }
            onInitSQL(SQL);

            bindings.put("session", session);
            bindings.put("context", context);
            bindings.put("log", getLogger());
            bindings.put("REL_SUCCESS", REL_SUCCESS);
            bindings.put("REL_FAILURE", REL_FAILURE);
            bindings.put("CTL", CTL);
            bindings.put("SQL", SQL);

            script.run();
            bindings.clear();

            onCommitSQL(SQL);
            session.commit();
        } catch (Throwable t) {
            getLogger().error(t.toString(), t);
            onFailSQL(SQL);
            if (toFailureOnError) {
                //transfer all received to failure with two new attributes: ERROR_MESSAGE and ERROR_STACKTRACE.
                session.revertReceivedTo(REL_FAILURE, StackTraceUtils.deepSanitize(t));
            } else {
                session.rollback(true);
            }
        } finally {
            onFinitSQL(SQL);
        }
    }

    /**
     * Returns a PropertyDescriptor for the given name. This is for the user to be able to define their own properties
     * which will be available as variables in the script.
     * `CTL.`-prefixed names identify generic controller services, `SQL.`-prefixed names
     * identify DBCP services; everything else is a plain expression-language property.
     *
     * @param propertyDescriptorName used to lookup if any property descriptors exist for that name
     * @return a PropertyDescriptor object corresponding to the specified dynamic property name
     */
    @Override
    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(final String propertyDescriptorName) {
        if (propertyDescriptorName.startsWith("CTL.")) {
            return new PropertyDescriptor.Builder()
                    .name(propertyDescriptorName)
                    .required(false)
                    .description("Controller service accessible from code as `" + propertyDescriptorName + "`")
                    .dynamic(true)
                    .identifiesControllerService(ControllerService.class)
                    .build();
        }
        if (propertyDescriptorName.startsWith("SQL.")) {
            return new PropertyDescriptor.Builder()
                    .name(propertyDescriptorName)
                    .required(false)
                    .description("The `groovy.sql.Sql` object created from DBCP Controller service and accessible from code as `" + propertyDescriptorName + "`")
                    .dynamic(true)
                    .identifiesControllerService(DBCPService.class)
                    .build();
        }
        return new PropertyDescriptor.Builder()
                .name(propertyDescriptorName)
                .required(false)
                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
                .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
                .dynamic(true)
                .build();
    }

    /**
     * simple HashMap with exception on access of non-existent key.
     * NOTE(review): could be static — it does not use the enclosing instance.
     */
    private class AccessMap extends HashMap {
        private String parentKey; // prefix ("CTL"/"SQL") used in the error message

        AccessMap(String parentKey){
            this.parentKey=parentKey;
        }

        @Override
        public Object get(Object key) {
            if (!containsKey(key)) {
                throw new RuntimeException("The `" + parentKey + "." + key + "` not defined in processor properties");
            }
            return super.get(key);
        }
    }
}
package com.morzeux.bioseclogger;

import java.io.IOException;
import java.util.Locale;

import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.Spinner;
import android.widget.TextView;
import biosecLogger.analysis.Analyzer;
import biosecLogger.charts.ChartBuilder;
import biosecLogger.core.OptionsManager;
import biosecLogger.core.UserLoggerManager;

import com.morzeux.bioseclogger.logic.ActionMenu;
import com.morzeux.bioseclogger.logic.ExtendedStorageHandler;
import com.morzeux.bioseclogger.logic.PopupBuilder;
import com.morzeux.bioseclogger.tasks.AnalyserTask;
import com.morzeux.bioseclogger.tasks.CheckNewVersionTask;
import com.morzeux.bioseclogger.tasks.DownloadTask;

/**
 * Activity analyzes experimental samples with different options.
 * Reads analysis options from {@link OptionsManager}, lets the user tweak them in
 * the UI, and launches asynchronous tasks for analysis/download/version checks.
 *
 * @author Stefan Smihla
 *
 */
public class AnalyzeActivity extends Activity {

    private Analyzer analyser;          // analysis engine configured from oManager
    private OptionsManager oManager;    // persisted analysis options

    private TextView infoText;
    private ProgressBar loadingBar;
    private Spinner algorithmDropdown;
    private EditText nGraphsEditText;
    private EditText pThresholdEditText;
    private EditText sensitivityEditText;
    private Button testButton;
    private Button viewButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_analyze);

        oManager = new OptionsManager(this);
        analyser = new Analyzer(oManager);

        infoText = (TextView) findViewById(R.id.resultsLabel);
        loadingBar = (ProgressBar) findViewById(R.id.loadingBar);
        testButton = (Button) findViewById(R.id.testButton);
        viewButton = (Button) findViewById(R.id.viewSamplesButton);

        algorithmDropdown = (Spinner) findViewById(R.id.algorithmDropdown);
        algorithmDropdown.setOnItemSelectedListener(new ItemSelector());
        nGraphsEditText = (EditText) findViewById(R.id.nGraphsEditText);
        pThresholdEditText = (EditText) findViewById(R.id.pThresholdEditText);
        sensitivityEditText = (EditText) findViewById(R.id.sensitivityEditText);

        // Populate inputs from stored options; thresholds are stored as fractions
        // and displayed as whole percentages.
        algorithmDropdown.setSelection(oManager.getEvaluationAlgorithm());
        nGraphsEditText.setText(String.format(Locale.ENGLISH, "%d", oManager.getGraphs()));
        pThresholdEditText.setText(String.format(Locale.ENGLISH, "%d",
                (int) (oManager.getThresholdP() * 100)));
        sensitivityEditText.setText(String.format(Locale.ENGLISH, "%d",
                (int) (oManager.getSensitivity() * 100)));

        // Restore the checkbox states from the packed option flag.
        int flag = oManager.getFlag();
        setCheckBox(flag, OptionsManager.FLYINGTIMES, R.id.flyingTimesCheckBox);
        setCheckBox(flag, OptionsManager.ACCELERANCE, R.id.acceleratorCheckBox);
        setCheckBox(flag, OptionsManager.ORIENTATION, R.id.orientationCheckBox);
        setCheckBox(flag, OptionsManager.ERRORRATE, R.id.errorRateCheckBox);
        setCheckBox(flag, OptionsManager.LONGPRESSRATE, R.id.substitutionCheckBox);

        // Offer a download when no sample data exists yet, otherwise check for updates.
        if (!ExtendedStorageHandler.checkFile(getResources().getString(R.string.BIOSEC_DATA))) {
            downloadPopup(getResources().getString(R.string.noSamples));
        } else {
            new CheckNewVersionTask(this).execute();
        }
    }

    /**
     * Updates view on different option selection.
     * The p-threshold field is only meaningful for the T-tests algorithm.
     *
     * @author Stefan Smihla
     *
     */
    private class ItemSelector implements AdapterView.OnItemSelectedListener {

        @Override
        public void onItemSelected(AdapterView<?> arg0, View arg1, int arg2,
                long arg3) {
            int algorithm = algorithmDropdown.getSelectedItemPosition();
            if (algorithm == OptionsManager.T_TESTS) {
                pThresholdEditText.setEnabled(true);
            } else {
                pThresholdEditText.setEnabled(false);
            }

            oManager.setEvaluationAlgorithm(algorithm);
            // Sensitivity default may depend on the algorithm, so refresh the field.
            sensitivityEditText.setText(String.format(Locale.ENGLISH, "%d",
                    (int) (oManager.getSensitivity() * 100)));
        }

        @Override
        public void onNothingSelected(AdapterView<?> arg0) {
        }
    }

    /**
     * Saves evaluated results to file.
     * Shows a confirmation popup on success and an error popup on I/O failure.
     */
    public void saveResults() {
        try {
            analyser.saveToFile(getResources().getString(R.string.RESULTS_FILE));
            new PopupBuilder(this, getResources().getString(
                    R.string.infoDialogTitle), getResources().getString(
                    R.string.fileSaved).replace("XXXX",
                    getResources().getString(R.string.RESULTS_FILE)),
                    getResources().getString(R.string.closeButtonLabel))
                    .createClosingPopup();
        } catch (IOException e) {
            Log.w(UserLoggerManager.LOGNAME, e.getMessage(), e);
            new PopupBuilder(this, getResources().getString(
                    R.string.infoDialogTitle), getResources().getString(
                    R.string.savingError), getResources().getString(
                    R.string.closeButtonLabel)).createClosingPopup();
        }
    }

    /**
     * Sets checkbox according to flag value.
     *
     * @param flag
     *            source flag
     * @param value
     *            value to evaluate
     * @param rID
     *            ID of checkbox
     */
    private void setCheckBox(int flag, int value, int rID) {
        if ((flag & value) != 0) {
            ((CheckBox) findViewById(rID)).setChecked(true);
        } else {
            ((CheckBox) findViewById(rID)).setChecked(false);
        }
    }

    /**
     * Checks if checkbox is checked.
     *
     * @param rID
     *            ID of checkbox
     * @return true if checked, otherwise false
     */
    private boolean checkCheckBox(int rID) {
        return ((CheckBox) findViewById(rID)).isChecked();
    }

    /**
     * Computes flag from checked checkboxes.
     *
     * @return computed flag
     */
    private int computeFlag() {
        int flag = 0;

        if (checkCheckBox(R.id.flyingTimesCheckBox))
            flag |= OptionsManager.FLYINGTIMES;

        if (checkCheckBox(R.id.acceleratorCheckBox))
            flag |= OptionsManager.ACCELERANCE;

        if (checkCheckBox(R.id.orientationCheckBox))
            flag |= OptionsManager.ORIENTATION;

        if (checkCheckBox(R.id.errorRateCheckBox))
            flag |= OptionsManager.ERRORRATE;

        if (checkCheckBox(R.id.substitutionCheckBox))
            flag |= OptionsManager.LONGPRESSRATE;

        return flag;
    }

    /**
     * Starts asynchronous analyze task.
     * Persists the current UI options first; offers a download when sample data
     * is missing. NOTE(review): Integer/Double.parse on raw EditText input can
     * throw NumberFormatException for empty/invalid text — confirm the input
     * fields are numeric-only in the layout.
     */
    public void analyseSamples(){
        if (!ExtendedStorageHandler.checkFile(getResources().getString(R.string.BIOSEC_DATA))) {
            downloadPopup(getResources().getString(R.string.noSamples));
        } else {
            oManager.setEvaluationAlgorithm(algorithmDropdown
                    .getSelectedItemPosition());
            oManager.setGraphs(Integer.parseInt(nGraphsEditText.getText()
                    .toString()));
            // UI shows percentages; options store fractions.
            oManager.setThresholdP(Double.parseDouble(pThresholdEditText
                    .getText().toString()) / 100);
            oManager.setSensitivity(Double.parseDouble(sensitivityEditText
                    .getText().toString()) / 100);
            oManager.setFlag(computeFlag());
            oManager.saveSettings(this);

            new AnalyserTask(this).execute(analyser);
        }
    }

    /**
     * Starts action from selected button.
     *
     * @param view
     *            source view
     */
    public final void onClick(View view) {
        switch (view.getId()) {
        case R.id.testButton:
            analyseSamples();
            break;
        case R.id.viewSamplesButton:
            startActivity(new Intent(this, SamplesOverviewActivity.class));
            break;
        }
    }

    /**
     * Starts download task on popup submission.
     */
    public void popupSubmitted() {
        new DownloadTask(this).execute();
    }

    /**
     * Shows visualized results.
     * Builds a bar chart intent from the analysis results and launches it.
     */
    public void showResults() {
        Intent intent = new ChartBuilder(this).buildBarChart(
                getResources().getString(R.string.titleBar),
                getResources().getString(R.string.xLabelBar),
                getResources().getString(R.string.yLabelBar),
                analyser.getResults());
        startActivity(intent);
    }

    /**
     * Opens download popup.
     *
     * @param message
     *            custom content message
     */
    public void downloadPopup(String message) {
        // "popupSubmitted" is invoked reflectively by PopupBuilder on confirmation.
        new PopupBuilder(this, getResources().getString(
                R.string.infoDialogTitle), message, getResources().getString(
                R.string.closeButtonLabel)).createAskPopup(getResources()
                .getString(R.string.download), "popupSubmitted");
    }

    /**
     * Returns test button. This is used for asynchronous task to access button.
     *
     * @return test button
     */
    public Button getTestButton() {
        return testButton;
    }

    /**
     * Returns view button. This is used for asynchronous task to access button.
     *
     * @return view button
     */
    public Button getViewButton() {
        return viewButton;
    }

    /**
     * Returns progress bar. This is used for asynchronous task to access
     * button.
     *
     * @return loading bar
     */
    public ProgressBar getProgressBar() {
        return loadingBar;
    }

    /**
     * Returns info text for asynchronous task to access.
     *
     * @return info text
     */
    public TextView getInfoText() {
        return infoText;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        // Only this activity exposes the "save results" action.
        menu.findItem(R.id.saveResultsButton).setVisible(true);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        Intent intent = null;

        if (item.getItemId() == R.id.saveResultsButton) {
            saveResults();
        } else {
            // Delegate all other menu actions to the shared menu handler.
            intent = ActionMenu.onOptionsItemSelected(this, item.getItemId());
        }

        if (intent != null) {
            startActivity(intent);
        }

        return super.onOptionsItemSelected(item);
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.java.decompiler.modules.decompiler.stats;

import org.jetbrains.java.decompiler.code.CodeConstants;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.TextBuffer;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.main.collectors.CounterContainer;
import org.jetbrains.java.decompiler.modules.decompiler.DecHelper;
import org.jetbrains.java.decompiler.modules.decompiler.ExprProcessor;
import org.jetbrains.java.decompiler.modules.decompiler.StatEdge;
import org.jetbrains.java.decompiler.modules.decompiler.exps.VarExprent;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarProcessor;
import org.jetbrains.java.decompiler.struct.gen.VarType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

/**
 * Statement covering a "catch-all" protected region: a head statement whose
 * single exception handler catches every exception type. Rendered by
 * {@link #toJava} either as {@code try { ... } finally { ... }} (when
 * {@code isFinally} is set) or as {@code try { ... } catch (Throwable t) { ... }}.
 */
public class CatchAllStatement extends Statement {
  /** The handler statement executed when the protected region throws. */
  private Statement handler;
  /** True if this construct should be printed as a finally block. */
  private boolean isFinally;
  /** Monitor flag variable used when rendering synchronized-related handlers, or null. */
  private VarExprent monitor;
  /** Catch variables; vars.get(0) is printed in the catch clause header. */
  private final List<VarExprent> vars = new ArrayList<>();

  // *****************************************************************************
  // constructors
  // *****************************************************************************

  private CatchAllStatement() {
    type = Statement.TYPE_CATCHALL;
  }

  private CatchAllStatement(Statement head, Statement handler) {
    this();

    first = head;
    stats.addWithKey(head, head.id);

    this.handler = handler;
    stats.addWithKey(handler, handler.id);

    // A regular successor of the head becomes the post-dominator of this construct.
    List<StatEdge> lstSuccs = head.getSuccessorEdges(STATEDGE_DIRECT_ALL);
    if (!lstSuccs.isEmpty()) {
      StatEdge edge = lstSuccs.get(0);
      if (edge.getType() == StatEdge.TYPE_REGULAR) {
        post = edge.getDestination();
      }
    }

    // Allocate the catch variable (typed java/lang/Throwable for a catch-all).
    vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER),
                            new VarType(CodeConstants.TYPE_OBJECT, 0, "java/lang/Throwable"),
                            (VarProcessor)DecompilerContext.getProperty(DecompilerContext.CURRENT_VAR_PROCESSOR)));
  }

  // *****************************************************************************
  // public methods
  // *****************************************************************************

  /**
   * Tests whether {@code head} starts a catch-all construct: exactly one
   * unique exception predecessor handler, reached by an exception edge with no
   * declared exception types, with no regular successor of its own.
   *
   * @param head candidate head statement
   * @return a new {@link CatchAllStatement} if the pattern matches, otherwise null
   */
  public static Statement isHead(Statement head) {
    if (head.getLastBasicType() != Statement.LASTBASICTYPE_GENERAL) {
      return null;
    }

    HashSet<Statement> setHandlers = DecHelper.getUniquePredExceptions(head);
    if (setHandlers.size() != 1) {
      return null;
    }

    for (StatEdge edge : head.getSuccessorEdges(StatEdge.TYPE_EXCEPTION)) {
      Statement exc = edge.getDestination();

      // getExceptions() == null means the edge catches every exception type.
      if (edge.getExceptions() == null && setHandlers.contains(exc) && exc.getLastBasicType() == LASTBASICTYPE_GENERAL) {
        List<StatEdge> lstSuccs = exc.getSuccessorEdges(STATEDGE_DIRECT_ALL);
        if (lstSuccs.isEmpty() || lstSuccs.get(0).getType() != StatEdge.TYPE_REGULAR) {

          // monitorenter blocks are handled by the synchronized-statement logic instead.
          if (head.isMonitorEnter() || exc.isMonitorEnter()) {
            return null;
          }

          if (DecHelper.checkStatementExceptions(Arrays.asList(head, exc))) {
            return new CatchAllStatement(head, exc);
          }
        }
      }
    }

    return null;
  }

  /**
   * Renders this statement as Java source.
   *
   * @param indent current indentation level
   * @param tracer source-line tracer, advanced in lockstep with emitted lines
   * @return buffer containing the generated code
   */
  public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) {
    String new_line_separator = DecompilerContext.getNewLineSeparator();

    TextBuffer buf = new TextBuffer();

    buf.append(ExprProcessor.listToJava(varDefinitions, indent, tracer));

    boolean labeled = isLabeled();
    if (labeled) {
      buf.appendIndent(indent).append("label").append(this.id.toString()).append(":").appendLineSeparator();
      tracer.incrementCurrentSourceLine();
    }

    List<StatEdge> lstSuccs = first.getSuccessorEdges(STATEDGE_DIRECT_ALL);

    // Merge a nested try-catch into this finally: "try { } catch { } finally { }"
    // instead of a redundant nested "try { try ... } finally".
    if (first.type == TYPE_TRYCATCH && first.varDefinitions.isEmpty() && isFinally &&
        !labeled && !first.isLabeled() && (lstSuccs.isEmpty() || !lstSuccs.get(0).explicit)) {
      TextBuffer content = ExprProcessor.jmpWrapper(first, indent, true, tracer);
      // Drop the inner statement's trailing line break; the closing brace is shared.
      content.setLength(content.length() - new_line_separator.length());
      tracer.incrementCurrentSourceLine(-1);
      buf.append(content);
    }
    else {
      buf.appendIndent(indent).append("try {").appendLineSeparator();
      tracer.incrementCurrentSourceLine();
      buf.append(ExprProcessor.jmpWrapper(first, indent + 1, true, tracer));
      buf.appendIndent(indent).append("}");
    }

    buf.append(isFinally ? " finally" : " catch (" + vars.get(0).toJava(indent, tracer) + ")").append(" {").appendLineSeparator();
    tracer.incrementCurrentSourceLine();

    if (monitor != null) {
      buf.appendIndent(indent + 1).append("if (").append(monitor.toJava(indent, tracer)).append(") {").appendLineSeparator();
      tracer.incrementCurrentSourceLine();
    }

    buf.append(ExprProcessor.jmpWrapper(handler, indent + 1 + (monitor != null ? 1 : 0), true, tracer));

    if (monitor != null) {
      buf.appendIndent(indent + 1).append("}").appendLineSeparator();
      tracer.incrementCurrentSourceLine();
    }

    buf.appendIndent(indent).append("}").appendLineSeparator();
    tracer.incrementCurrentSourceLine();

    return buf;
  }

  public void replaceStatement(Statement oldstat, Statement newstat) {
    if (handler == oldstat) {
      handler = newstat;
    }

    super.replaceStatement(oldstat, newstat);
  }

  /**
   * Creates a shallow structural copy with the same finally flag and fresh
   * monitor/catch variables (child statements are not copied here).
   */
  public Statement getSimpleCopy() {
    CatchAllStatement cas = new CatchAllStatement();

    cas.isFinally = this.isFinally;

    if (this.monitor != null) {
      cas.monitor = new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER),
                                   VarType.VARTYPE_INT,
                                   (VarProcessor)DecompilerContext.getProperty(DecompilerContext.CURRENT_VAR_PROCESSOR));
    }

    if (!this.vars.isEmpty()) {
      // Bug fix: the new variable must go into the copy's list. The previous
      // code appended to this.vars (flagged "FIXME: WTF??? vars?!"), which
      // mutated the source statement and left the copy's vars empty.
      cas.vars.add(new VarExprent(DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.VAR_COUNTER),
                                  new VarType(CodeConstants.TYPE_OBJECT, 0, "java/lang/Throwable"),
                                  (VarProcessor)DecompilerContext.getProperty(DecompilerContext.CURRENT_VAR_PROCESSOR)));
    }

    return cas;
  }

  /** Re-links first/handler after {@code stats} has been repopulated on a copy. */
  public void initSimpleCopy() {
    first = stats.get(0);
    handler = stats.get(1);
  }

  // *****************************************************************************
  // getter and setter methods
  // *****************************************************************************

  public Statement getHandler() {
    return handler;
  }

  public void setHandler(Statement handler) {
    this.handler = handler;
  }

  public boolean isFinally() {
    return isFinally;
  }

  public void setFinally(boolean isFinally) {
    this.isFinally = isFinally;
  }

  public VarExprent getMonitor() {
    return monitor;
  }

  public void setMonitor(VarExprent monitor) {
    this.monitor = monitor;
  }

  public List<VarExprent> getVars() {
    return vars;
  }
}
/* * Copyright (c) 2018, The University of Memphis, MD2K Center of Excellence * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.md2k.datakit.logger; import android.content.ContentValues; import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteQueryBuilder; import com.esotericsoftware.kryo.Kryo; import com.esotericsoftware.kryo.io.Input; import com.esotericsoftware.kryo.io.Output; import org.md2k.datakitapi.source.datasource.DataSource; import org.md2k.datakitapi.source.datasource.DataSourceClient; import org.md2k.datakitapi.status.Status; import org.md2k.datakitapi.time.DateTime; import org.md2k.utilities.Report.Log; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.util.ArrayList; import java.util.Arrays; /** * Class for defining a <code>DataSource</code> table within the database. */ public class DatabaseTable_DataSource { /** Name of the table. <p>Default is <code>"datasources"</code>.</p> */ private static String TABLE_NAME = "datasources"; /** Data stream identifier. */ private static String C_DS_ID = "ds_id"; /** Data source identifier. */ private static String C_DATASOURCE_ID = "datasource_id"; /** Data source type. */ private static String C_DATASOURCE_TYPE = "datasource_type"; /** Platform identifier. */ private static String C_PLATFORM_ID = "platform_id"; /** Platform type. */ private static String C_PLATFROM_TYPE = "platform_type"; /** Platform application identifier. */ private static String C_PLATFORMAPP_ID = "platformapp_id"; /** Platform application type. */ private static String C_PLATFROMAPP_TYPE = "platformapp_type"; /** Application identifier. */ private static String C_APPLICATION_ID = "application_id"; /** Application type. */ private static String C_APPLICATION_TYPE = "application_type"; /** Datetime of a new <code>DataSource</code> row creation. */ private static String C_CREATEDATETIME = "create_datetime"; /** <code>DataSource</code> */ private static String C_DATASOURCE = "datasource"; /** Database table creation command. 
*/ private static final String SQL_CREATE_DATASOURCE = "CREATE TABLE IF NOT EXISTS " + TABLE_NAME + " (" + C_DS_ID + " INTEGER PRIMARY KEY autoincrement, " + C_DATASOURCE_ID + " TEXT, " + C_DATASOURCE_TYPE + " TEXT, " + C_PLATFORM_ID + " TEXT, " + C_PLATFROM_TYPE + " TEXT," + C_PLATFORMAPP_ID + " TEXT, " + C_PLATFROMAPP_TYPE + " TEXT," + C_APPLICATION_ID + " TEXT, " + C_APPLICATION_TYPE + " TEXT," + C_CREATEDATETIME + " LONG, " + C_DATASOURCE + " BLOB not null);"; /** * Creates a <code>DataSource</code> table in the database if one does not already exist. * * @param db database to check. */ DatabaseTable_DataSource(SQLiteDatabase db) { createIfNotExists(db); } /** * Check for available fields for the given <code>DataSource</code> and return a string of * valid selection arguments. * * @param dataSource <code>DataSource</code> to get arguments for. * @return A string of valid selection arguments. */ private synchronized String[] prepareSelectionArgs(DataSource dataSource) { ArrayList<String> selectionArgs = new ArrayList<>(); if (dataSource.getId() != null) selectionArgs.add(dataSource.getId()); if (dataSource.getType() != null) selectionArgs.add(dataSource.getType()); if (dataSource.getPlatform() != null && dataSource.getPlatform().getId() != null) selectionArgs.add(dataSource.getPlatform().getId()); if (dataSource.getPlatform() != null && dataSource.getPlatform().getType() != null) selectionArgs.add(dataSource.getPlatform().getType()); if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getId() != null) selectionArgs.add(dataSource.getPlatformApp().getId()); if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getType() != null) selectionArgs.add(dataSource.getPlatformApp().getType()); if (dataSource.getApplication() != null && dataSource.getApplication().getId() != null) selectionArgs.add(dataSource.getApplication().getId()); if (dataSource.getApplication() != null && dataSource.getApplication().getType() != null) 
selectionArgs.add(dataSource.getApplication().getType()); if (selectionArgs.size() == 0) return null; return selectionArgs.toArray(new String[selectionArgs.size()]); } /** * Constructs a selection string for the given <code>DataSource</code>. * * @param dataSource <code>DataSource</code> to select for. * @return A valid selection string for the given <code>DataSource</code>. */ private synchronized String prepareSelection(DataSource dataSource) { String selection = ""; if (dataSource.getId() != null) { if (!selection.equals("")) selection += " AND "; selection += C_DATASOURCE_ID + "=?"; } if (dataSource.getType() != null) { if (!selection.equals("")) selection += " AND "; selection += C_DATASOURCE_TYPE + "=?"; } if (dataSource.getPlatform() != null && dataSource.getPlatform().getId() != null) { if (!selection.equals("")) selection += " AND "; selection += C_PLATFORM_ID + "=?"; } if (dataSource.getPlatform() != null && dataSource.getPlatform().getType() != null) { if (!selection.equals("")) selection += " AND "; selection += C_PLATFROM_TYPE + "=?"; } if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getId() != null) { if (!selection.equals("")) selection += " AND "; selection += C_PLATFORMAPP_ID + "=?"; } if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getType() != null) { if (!selection.equals("")) selection += " AND "; selection += C_PLATFROMAPP_TYPE + "=?"; } if (dataSource.getApplication() != null && dataSource.getApplication().getId() != null) { if (!selection.equals("")) selection += " AND "; selection += C_APPLICATION_ID + "=?"; } if (dataSource.getApplication() != null && dataSource.getApplication().getType() != null) { if (!selection.equals("")) selection += " AND "; selection += C_APPLICATION_TYPE + "=?"; } if (selection.equals("")) return null; return selection; } /** * Creates a <code>DataSource</code> table if one does not already exist. * * @param db Database to create the table in. 
*/ public synchronized void createIfNotExists(SQLiteDatabase db) { db.execSQL(SQL_CREATE_DATASOURCE); } /** * Constructs a database query for finding the given <code>DataSource</code> rows. * * @param db Database to query. * @param dataSource <code>DataSource</code> to query for. * @return ArrayList of <code>DataSource</code> table rows that match the query. */ public synchronized ArrayList<DataSourceClient> findDataSource(SQLiteDatabase db, DataSource dataSource) { ArrayList<DataSourceClient> dataSourceClients = new ArrayList<>(); SQLiteQueryBuilder queryBuilder = new SQLiteQueryBuilder(); queryBuilder.setTables(TABLE_NAME); String[] columns = new String[]{C_DS_ID, C_DATASOURCE}; String selection = prepareSelection(dataSource); String[] selectionArgs = prepareSelectionArgs(dataSource); Cursor mCursor = db.query(TABLE_NAME, columns, selection, selectionArgs, null, null, null); try { if (mCursor.moveToFirst()) { do { byte[] bytes = mCursor.getBlob(mCursor.getColumnIndex(C_DATASOURCE)); DataSource curDataSource = fromBytes(bytes); DataSourceClient dataSourceClient = new DataSourceClient(mCursor.getInt(mCursor.getColumnIndex(C_DS_ID)), curDataSource, new Status(Status.DATASOURCE_EXIST)); dataSourceClients.add(dataSourceClient); } while (mCursor.moveToNext()); } mCursor.close(); } catch (Exception ignored){} return dataSourceClients; } /** * Removes the <code>DataSource</code> table of the given database. * * @param db Database to remove the table from. */ public synchronized void removeAll(SQLiteDatabase db) { db.execSQL("DROP TABLE IF EXISTS " + TABLE_NAME); } /** * Registers a new row in the database for the given <code>DataSource</code>. * * @param db Database to add the new row to. * @param dataSource <code>DataSource</code> to add a row for. * @return The registered <code>DataSourceClient</code>. 
*/ public synchronized DataSourceClient register(SQLiteDatabase db, DataSource dataSource) { ContentValues cValues = prepareDataSource(dataSource); int newRowId; newRowId = (int) db.insert(TABLE_NAME, null, cValues); if (newRowId == -1) { return new DataSourceClient(-1, dataSource, new Status(Status.INTERNAL_ERROR)); } else return new DataSourceClient(newRowId, dataSource, new Status(Status.SUCCESS)); } /** * Adds information about the <code>DataSource</code> to the <code>ContentValues</code> set. * * @param dataSource <code>DataSource</code> to prepare. * @return The <code>ContentValues</code> set containing the <code>DataSource</code>. */ public synchronized ContentValues prepareDataSource(DataSource dataSource) { byte[] dataSourceArray = toBytes(dataSource); long curTime = DateTime.getDateTime(); ContentValues cValues = new ContentValues(); if (dataSource.getId() != null) cValues.put(C_DATASOURCE_ID, dataSource.getId()); if (dataSource.getType() != null) cValues.put(C_DATASOURCE_TYPE, dataSource.getType()); if (dataSource.getPlatform() != null && dataSource.getPlatform().getId() != null) cValues.put(C_PLATFORM_ID, dataSource.getPlatform().getId()); if (dataSource.getPlatform() != null && dataSource.getPlatform().getType() != null) cValues.put(C_PLATFROM_TYPE, dataSource.getPlatform().getType()); if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getId() != null) cValues.put(C_PLATFORMAPP_ID, dataSource.getPlatformApp().getId()); if (dataSource.getPlatformApp() != null && dataSource.getPlatformApp().getType() != null) cValues.put(C_PLATFROMAPP_TYPE, dataSource.getPlatformApp().getType()); if (dataSource.getApplication() != null && dataSource.getApplication().getId() != null) cValues.put(C_APPLICATION_ID, dataSource.getApplication().getId()); if (dataSource.getApplication() != null && dataSource.getApplication().getType() != null) cValues.put(C_APPLICATION_TYPE, dataSource.getApplication().getType()); cValues.put(C_CREATEDATETIME, curTime); 
cValues.put(C_DATASOURCE, dataSourceArray); return cValues; } /** * Converts a <code>DataSource</code> to a byte array. * * @param dataSource <code>DataSource</code> to convert. * @return The resulting byte array. */ private synchronized byte[] toBytes(DataSource dataSource) { Kryo kryo = new Kryo(); byte[] bytes; ByteArrayOutputStream baos = new ByteArrayOutputStream(); Output output = new Output(baos); kryo.writeClassAndObject(output, dataSource); output.close(); bytes = baos.toByteArray(); return bytes; } /** * Reads a <code>DataSource</code> from a byte array that contains it. * * @param bytes Array to read. * @return The <code>DataSource</code> that was stored in the array. */ private synchronized DataSource fromBytes(byte[] bytes) { Kryo kryo = new Kryo(); Input input = new Input(new ByteArrayInputStream(bytes)); DataSource curDataSource = (DataSource) kryo.readClassAndObject(input); input.close(); return curDataSource; } }
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.util; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.io.RawComparator; import org.apache.hadoop.io.Writable; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.nio.ByteBuffer; import java.text.NumberFormat; import java.util.Random; /** * Implements a <i>Bloom filter</i>, as defined by Bloom in 1970. * <p> * The Bloom filter is a data structure that was introduced in 1970 and that has * been adopted by the networking research community in the past decade thanks * to the bandwidth efficiencies that it offers for the transmission of set * membership information between networked hosts. A sender encodes the * information into a bit vector, the Bloom filter, that is more compact than a * conventional representation. Computation and space costs for construction are * linear in the number of elements. The receiver uses the filter to test * whether various elements are members of the set. Though the filter will * occasionally return a false positive, it will never return a false negative. 
* When creating the filter, the sender can choose its desired point in a * trade-off between the false positive rate and the size. * * <p> * Originally inspired by <a href="http://www.one-lab.org">European Commission * One-Lab Project 034819</a>. * * Bloom filters are very sensitive to the number of elements inserted into * them. For HBase, the number of entries depends on the size of the data stored * in the column. Currently the default region size is 256MB, so entry count ~= * 256MB / (average value size for column). Despite this rule of thumb, there is * no efficient way to calculate the entry count after compactions. Therefore, * it is often easier to use a dynamic bloom filter that will add extra space * instead of allowing the error rate to grow. * * ( http://www.eecs.harvard.edu/~michaelm/NEWWORK/postscripts/BloomFilterSurvey * .pdf ) * * m denotes the number of bits in the Bloom filter (bitSize) n denotes the * number of elements inserted into the Bloom filter (maxKeys) k represents the * number of hash functions used (nbHash) e represents the desired false * positive rate for the bloom (err) * * If we fix the error rate (e) and know the number of entries, then the optimal * bloom size m = -(n * ln(err) / (ln(2)^2) ~= n * ln(err) / ln(0.6185) * * The probability of false positives is minimized when k = m/n ln(2). * * @see BloomFilter The general behavior of a filter * * @see <a * href="http://portal.acm.org/citation.cfm?id=362692&dl=ACM&coll=portal"> * Space/Time Trade-Offs in Hash Coding with Allowable Errors</a> */ @InterfaceAudience.Private public class ByteBloomFilter implements BloomFilter, BloomFilterWriter { /** Current file format version */ public static final int VERSION = 1; /** Bytes (B) in the array. This actually has to fit into an int. 
   */
  protected long byteSize;

  /** Number of hash functions applied per key. */
  protected int hashCount;

  /** Hash type identifier, resolved via {@link Hash#getInstance}. */
  protected final int hashType;

  /** Hash Function */
  protected final Hash hash;

  /** Keys currently in the bloom */
  protected int keyCount;

  /** Max Keys expected for the bloom */
  protected int maxKeys;

  /** Bloom bits */
  protected ByteBuffer bloom;

  /** Record separator for the Bloom filter statistics human-readable string */
  public static final String STATS_RECORD_SEP = "; ";

  /**
   * Used in computing the optimal Bloom filter size. This approximately equals
   * 0.480453.
   */
  public static final double LOG2_SQUARED = Math.log(2) * Math.log(2);

  /**
   * A random number generator to use for "fake lookups" when testing to
   * estimate the ideal false positive rate.
   */
  private static Random randomGeneratorForTest;

  /** Bit-value lookup array to prevent doing the same work over and over */
  private static final byte [] bitvals = {
    (byte) 0x01,
    (byte) 0x02,
    (byte) 0x04,
    (byte) 0x08,
    (byte) 0x10,
    (byte) 0x20,
    (byte) 0x40,
    (byte) 0x80
  };

  /**
   * Loads bloom filter meta data from file input.
   *
   * @param meta stored bloom meta data
   * @throws IOException if reading the meta data fails
   * @throws IllegalArgumentException meta data is invalid
   */
  public ByteBloomFilter(DataInput meta)
      throws IOException, IllegalArgumentException {
    this.byteSize = meta.readInt();
    this.hashCount = meta.readInt();
    this.hashType = meta.readInt();
    this.keyCount = meta.readInt();
    // A deserialized filter is treated as "full": maxKeys == keyCount.
    this.maxKeys = this.keyCount;

    this.hash = Hash.getInstance(this.hashType);
    if (hash == null) {
      throw new IllegalArgumentException("Invalid hash type: " + hashType);
    }
    sanityCheck();
  }

  /**
   * Computes the number of bits for a Bloom filter that can hold the given
   * number of keys and provide the given error rate, assuming that the
   * optimal number of hash functions is used and it does not have to
   * be an integer.
   *
   * @param maxKeys expected number of keys
   * @param errorRate desired false positive rate
   * @return the required number of bits
   */
  public static long computeBitSize(long maxKeys, double errorRate) {
    // m = -n * ln(err) / (ln 2)^2, rounded up.
    return (long) Math.ceil(maxKeys * (-Math.log(errorRate) / LOG2_SQUARED));
  }

  /**
   * The maximum number of keys we can put into a Bloom filter of a certain
   * size to maintain the given error rate, assuming the number of hash
   * functions is chosen optimally and does not even have to be an integer
   * (hence the "ideal" in the function name).
   *
   * @param bitSize size of the filter in bits
   * @param errorRate target false positive rate
   * @return maximum number of keys that can be inserted into the Bloom filter
   * @see #computeMaxKeys(long, double, int) for a more precise estimate
   */
  public static long idealMaxKeys(long bitSize, double errorRate) {
    // The reason we need to use floor here is that otherwise we might put
    // more keys in a Bloom filter than is allowed by the target error rate.
    return (long) (bitSize * (LOG2_SQUARED / -Math.log(errorRate)));
  }

  /**
   * The maximum number of keys we can put into a Bloom filter of a certain
   * size to get the given error rate, with the given number of hash functions.
   *
   * @param bitSize size of the filter in bits
   * @param errorRate target false positive rate
   * @param hashCount actual number of hash functions in use
   * @return the maximum number of keys that can be inserted in a Bloom filter
   *         to maintain the target error rate, if the number of hash functions
   *         is provided.
   */
  public static long computeMaxKeys(long bitSize, double errorRate,
      int hashCount) {
    // Inversion of the false-positive formula, solved for n.
    return (long) (-bitSize * 1.0 / hashCount *
        Math.log(1 - Math.exp(Math.log(errorRate) / hashCount)));
  }

  /**
   * Computes the error rate for this Bloom filter, taking into account the
   * actual number of hash functions and keys inserted. The return value of
   * this function changes as a Bloom filter is being populated. Used for
   * reporting the actual error rate of compound Bloom filters when writing
   * them out.
   *
   * @return error rate for this particular Bloom filter
   */
  public double actualErrorRate() {
    return actualErrorRate(keyCount, byteSize * 8, hashCount);
  }

  /**
   * Computes the actual error rate for the given number of elements, number
   * of bits, and number of hash functions. Taken directly from the
   * <a href=
   * "http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives"
   * > Wikipedia Bloom filter article</a>.
   *
   * @param maxKeys number of keys inserted
   * @param bitSize size of the filter in bits
   * @param functionCount number of hash functions used
   * @return the actual error rate
   */
  public static double actualErrorRate(long maxKeys, long bitSize,
      int functionCount) {
    // (1 - e^(-k*n/m))^k, evaluated in log space.
    return Math.exp(Math.log(1 - Math.exp(-functionCount * maxKeys * 1.0
        / bitSize)) * functionCount);
  }

  /**
   * Increases the given byte size of a Bloom filter until it can be folded by
   * the given factor.
   *
   * @param bitSize requested size in bits
   * @param foldFactor power of two the byte size must be divisible by
   * @return Foldable byte size
   */
  public static int computeFoldableByteSize(long bitSize, int foldFactor) {
    long byteSizeLong = (bitSize + 7) / 8;
    int mask = (1 << foldFactor) - 1;
    if ((mask & byteSizeLong) != 0) {
      // Round up to the next multiple of 2^foldFactor bytes.
      byteSizeLong >>= foldFactor;
      ++byteSizeLong;
      byteSizeLong <<= foldFactor;
    }
    if (byteSizeLong > Integer.MAX_VALUE) {
      throw new IllegalArgumentException("byteSize=" + byteSizeLong + " too "
          + "large for bitSize=" + bitSize + ", foldFactor=" + foldFactor);
    }
    return (int) byteSizeLong;
  }

  /**
   * Optimal hash-function count k = ceil(ln(2) * m / n) for a filter of
   * bitSize bits holding maxKeys keys.
   */
  private static int optimalFunctionCount(int maxKeys, long bitSize) {
    long i = bitSize / maxKeys;
    double result = Math.ceil(Math.log(2) * i);
    if (result > Integer.MAX_VALUE){
      throw new IllegalArgumentException("result too large for integer value.");
    }
    return (int)result;
  }

  /** Private constructor used by other constructors. */
  private ByteBloomFilter(int hashType) {
    this.hashType = hashType;
    this.hash = Hash.getInstance(hashType);
  }

  /**
   * Determines & initializes bloom filter meta data from user config. Call
   * {@link #allocBloom()} to allocate bloom filter data.
   *
   * @param maxKeys Maximum expected number of keys that will be stored in this
   *          bloom
   * @param errorRate Desired false positive error rate. Lower rate = more
   *          storage required
   * @param hashType Type of hash function to use
   * @param foldFactor When finished adding entries, you may be able to 'fold'
   *          this bloom to save space. Tradeoff potentially excess bytes in
   *          bloom for ability to fold if keyCount is exponentially greater
   *          than maxKeys.
   * @throws IllegalArgumentException if the resulting configuration is invalid
   */
  public ByteBloomFilter(int maxKeys, double errorRate, int hashType,
      int foldFactor) throws IllegalArgumentException {
    this(hashType);

    long bitSize = computeBitSize(maxKeys, errorRate);
    hashCount = optimalFunctionCount(maxKeys, bitSize);
    this.maxKeys = maxKeys;

    // increase byteSize so folding is possible
    byteSize = computeFoldableByteSize(bitSize, foldFactor);

    sanityCheck();
  }

  /**
   * Creates a Bloom filter of the given size.
   *
   * @param byteSizeHint the desired number of bytes for the Bloom filter bit
   *          array. Will be increased so that folding is possible.
   * @param errorRate target false positive rate of the Bloom filter
   * @param hashType Bloom filter hash function type
   * @param foldFactor power of two the byte size must remain divisible by
   * @return the new Bloom filter of the desired size
   */
  public static ByteBloomFilter createBySize(int byteSizeHint,
      double errorRate, int hashType, int foldFactor) {
    ByteBloomFilter bbf = new ByteBloomFilter(hashType);

    bbf.byteSize = computeFoldableByteSize(byteSizeHint * 8L, foldFactor);
    long bitSize = bbf.byteSize * 8;
    bbf.maxKeys = (int) idealMaxKeys(bitSize, errorRate);
    bbf.hashCount = optimalFunctionCount(bbf.maxKeys, bitSize);

    // Adjust max keys to bring error rate closer to what was requested,
    // because byteSize was adjusted to allow for folding, and hashCount was
    // rounded.
    bbf.maxKeys = (int) computeMaxKeys(bitSize, errorRate, bbf.hashCount);

    return bbf;
  }

  /**
   * Creates another similar Bloom filter. Does not copy the actual bits, and
   * sets the new filter's key count to zero.
   *
   * @return a Bloom filter with the same configuration as this
   */
  public ByteBloomFilter createAnother() {
    ByteBloomFilter bbf = new ByteBloomFilter(hashType);
    bbf.byteSize = byteSize;
    bbf.hashCount = hashCount;
    bbf.maxKeys = maxKeys;
    return bbf;
  }

  /** Allocates the backing bit array; may be called at most once. */
  @Override
  public void allocBloom() {
    if (this.bloom != null) {
      throw new IllegalArgumentException("can only create bloom once.");
    }
    this.bloom = ByteBuffer.allocate((int)this.byteSize);
    assert this.bloom.hasArray();
  }

  /** Validates the configured meta data fields, throwing on any invalid value. */
  void sanityCheck() throws IllegalArgumentException {
    if(0 >= this.byteSize || this.byteSize > Integer.MAX_VALUE) {
      throw new IllegalArgumentException("Invalid byteSize: " + this.byteSize);
    }

    if(this.hashCount <= 0) {
      throw new IllegalArgumentException("Hash function count must be > 0");
    }

    if (this.hash == null) {
      throw new IllegalArgumentException("hashType must be known");
    }

    if (this.keyCount < 0) {
      throw new IllegalArgumentException("must have positive keyCount");
    }
  }

  /** Verifies the supplied buffer's limit matches the configured byte size. */
  void bloomCheck(ByteBuffer bloom) throws IllegalArgumentException {
    if (this.byteSize != bloom.limit()) {
      throw new IllegalArgumentException(
          "Configured bloom length should match actual length");
    }
  }

  /** Adds the whole array as one key. */
  public void add(byte [] buf) {
    add(buf, 0, buf.length);
  }

  @Override
  public void add(byte [] buf, int offset, int len) {
    /*
     * For faster hashing, use combinatorial generation
     * http://www.eecs.harvard.edu/~kirsch/pubs/bbbf/esa06.pdf
     */
    int hash1 = this.hash.hash(buf, offset, len, 0);
    int hash2 = this.hash.hash(buf, offset, len, hash1);

    // g_i(x) = h1(x) + i*h2(x), one bit per hash function.
    for (int i = 0; i < this.hashCount; i++) {
      long hashLoc = Math.abs((hash1 + i * hash2) % (this.byteSize * 8));
      set(hashLoc);
    }

    ++this.keyCount;
  }

  /** Should only be used in tests */
  boolean contains(byte [] buf) {
    return contains(buf, 0, buf.length, this.bloom);
  }

  /** Should only be used in tests */
  boolean contains(byte [] buf, int offset, int length) {
    return contains(buf, offset, length, bloom);
  }

  /** Should only be used in tests */
  boolean contains(byte[] buf, ByteBuffer bloom) {
    return contains(buf, 0, buf.length, bloom);
  }

  @Override
  public boolean contains(byte[] buf, int offset, int length,
      ByteBuffer theBloom) {
    if (theBloom == null) {
      // In a version 1 HFile Bloom filter data is stored in a separate meta
      // block which is loaded on demand, but in version 2 it is pre-loaded.
      // We want to use the same API in both cases.
      theBloom = bloom;
    }

    if (theBloom.limit() != byteSize) {
      throw new IllegalArgumentException("Bloom does not match expected size:"
          + " theBloom.limit()=" + theBloom.limit() + ", byteSize=" + byteSize);
    }

    return contains(buf, offset, length, theBloom.array(),
        theBloom.arrayOffset(), (int) byteSize, hash, hashCount);
  }

  /**
   * Membership test against a raw byte-array representation of the bloom bits.
   * Returns false as soon as any probed bit is unset; a true result may still
   * be a false positive.
   */
  public static boolean contains(byte[] buf, int offset, int length,
      byte[] bloomArray, int bloomOffset, int bloomSize, Hash hash,
      int hashCount) {
    int hash1 = hash.hash(buf, offset, length, 0);
    int hash2 = hash.hash(buf, offset, length, hash1);
    int bloomBitSize = bloomSize << 3;

    if (randomGeneratorForTest == null) {
      // Production mode.
      int compositeHash = hash1;
      for (int i = 0; i < hashCount; i++) {
        int hashLoc = Math.abs(compositeHash % bloomBitSize);
        compositeHash += hash2;
        if (!get(hashLoc, bloomArray, bloomOffset)) {
          return false;
        }
      }
    } else {
      // Test mode with "fake lookups" to estimate "ideal false positive rate".
      for (int i = 0; i < hashCount; i++) {
        int hashLoc = randomGeneratorForTest.nextInt(bloomBitSize);
        if (!get(hashLoc, bloomArray, bloomOffset)){
          return false;
        }
      }
    }

    return true;
  }

  //---------------------------------------------------------------------------
  /** Private helpers */

  /**
   * Set the bit at the specified index to 1.
   *
   * @param pos index of bit
   */
  void set(long pos) {
    int bytePos = (int)(pos / 8);
    int bitPos = (int)(pos % 8);
    byte curByte = bloom.get(bytePos);
    curByte |= bitvals[bitPos];
    bloom.put(bytePos, curByte);
  }

  /**
   * Check if bit at specified index is 1.
   *
   * @param pos index of bit
   * @return true if bit at specified index is 1, false if 0.
*/ static boolean get(int pos, byte[] bloomArray, int bloomOffset) { int bytePos = pos >> 3; //pos / 8 int bitPos = pos & 0x7; //pos % 8 byte curByte = bloomArray[bloomOffset + bytePos]; curByte &= bitvals[bitPos]; return (curByte != 0); } @Override public long getKeyCount() { return keyCount; } @Override public long getMaxKeys() { return maxKeys; } @Override public long getByteSize() { return byteSize; } public int getHashType() { return hashType; } @Override public void compactBloom() { // see if the actual size is exponentially smaller than expected. if (this.keyCount > 0 && this.bloom.hasArray()) { int pieces = 1; int newByteSize = (int)this.byteSize; int newMaxKeys = this.maxKeys; // while exponentially smaller & folding is lossless while ( (newByteSize & 1) == 0 && newMaxKeys > (this.keyCount<<1) ) { pieces <<= 1; newByteSize >>= 1; newMaxKeys >>= 1; } // if we should fold these into pieces if (pieces > 1) { byte[] array = this.bloom.array(); int start = this.bloom.arrayOffset(); int end = start + newByteSize; int off = end; for(int p = 1; p < pieces; ++p) { for(int pos = start; pos < end; ++pos) { array[pos] |= array[off++]; } } // folding done, only use a subset of this array this.bloom.rewind(); this.bloom.limit(newByteSize); this.bloom = this.bloom.slice(); this.byteSize = newByteSize; this.maxKeys = newMaxKeys; } } } //--------------------------------------------------------------------------- /** * Writes just the bloom filter to the output array * @param out OutputStream to place bloom * @throws IOException Error writing bloom array */ public void writeBloom(final DataOutput out) throws IOException { if (!this.bloom.hasArray()) { throw new IOException("Only writes ByteBuffer with underlying array."); } out.write(bloom.array(), bloom.arrayOffset(), bloom.limit()); } @Override public Writable getMetaWriter() { return new MetaWriter(); } @Override public Writable getDataWriter() { return new DataWriter(); } private class MetaWriter implements Writable { 
protected MetaWriter() {} @Override public void readFields(DataInput arg0) throws IOException { throw new IOException("Cant read with this class."); } @Override public void write(DataOutput out) throws IOException { out.writeInt(VERSION); out.writeInt((int) byteSize); out.writeInt(hashCount); out.writeInt(hashType); out.writeInt(keyCount); } } private class DataWriter implements Writable { protected DataWriter() {} @Override public void readFields(DataInput arg0) throws IOException { throw new IOException("Cant read with this class."); } @Override public void write(DataOutput out) throws IOException { writeBloom(out); } } public int getHashCount() { return hashCount; } @Override public boolean supportsAutoLoading() { return bloom != null; } public static void setFakeLookupMode(boolean enabled) { if (enabled) { randomGeneratorForTest = new Random(283742987L); } else { randomGeneratorForTest = null; } } /** * {@inheritDoc} * Just concatenate row and column by default. May return the original row * buffer if the column qualifier is empty. */ @Override public byte[] createBloomKey(byte[] rowBuf, int rowOffset, int rowLen, byte[] qualBuf, int qualOffset, int qualLen) { // Optimize the frequent case when only the row is provided. if (qualLen <= 0 && rowOffset == 0 && rowLen == rowBuf.length) return rowBuf; byte [] result = new byte[rowLen + qualLen]; System.arraycopy(rowBuf, rowOffset, result, 0, rowLen); if (qualLen > 0) System.arraycopy(qualBuf, qualOffset, result, rowLen, qualLen); return result; } @Override public RawComparator<byte[]> getComparator() { return Bytes.BYTES_RAWCOMPARATOR; } /** * A human-readable string with statistics for the given Bloom filter. * * @param bloomFilter the Bloom filter to output statistics for; * @return a string consisting of "&lt;key&gt;: &lt;value&gt;" parts * separated by {@link #STATS_RECORD_SEP}. 
*/ public static String formatStats(BloomFilterBase bloomFilter) { StringBuilder sb = new StringBuilder(); long k = bloomFilter.getKeyCount(); long m = bloomFilter.getMaxKeys(); sb.append("BloomSize: " + bloomFilter.getByteSize() + STATS_RECORD_SEP); sb.append("No of Keys in bloom: " + k + STATS_RECORD_SEP); sb.append("Max Keys for bloom: " + m); if (m > 0) { sb.append(STATS_RECORD_SEP + "Percentage filled: " + NumberFormat.getPercentInstance().format(k * 1.0 / m)); } return sb.toString(); } @Override public String toString() { return formatStats(this) + STATS_RECORD_SEP + "Actual error rate: " + String.format("%.8f", actualErrorRate()); } }
/* Generated By:JavaCC: Do not edit this line. Parser.java */
// Copyright (c) 2013 Mikhail Afanasov and DeepSe group. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package parsers.makefile;

// NOTE(review): this class is machine-generated by JavaCC; edit the .jj
// grammar instead of this file. Comments below only describe the generated
// structure.
public class Parser implements ParserConstants {
  // Accumulates the result of parse(); exposed via getParsedFile().
  private MakeFile _file = new MakeFile();

  /** Returns the makefile model built by the last call to {@link #parse()}. */
  public MakeFile getParsedFile() {
    return _file;
  }

  /**
   * Parses a sequence of COMPONENT=NAME, include VARNAME, and
   * PFLAGS += -I&lt;dir&gt;... statements, filling _file.
   */
  final public void parse() throws ParseException {
    Token componentName;
    label_1:
    while (true) {
      // Lookahead: stop the loop when the next token starts no statement.
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case COMPONENT:
      case PFLAGS:
      case INCLUDE:
        ;
        break;
      default:
        jj_la1[0] = jj_gen;
        break label_1;
      }
      switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
      case COMPONENT:
        // COMPONENT = NAME  -> remember the component name.
        jj_consume_token(COMPONENT);
        jj_consume_token(EQUALS);
        componentName = jj_consume_token(NAME);
        _file.componentName = componentName.image;
        break;
      case INCLUDE:
        // include VARNAME   -> token consumed, value currently unused.
        jj_consume_token(INCLUDE);
        componentName = jj_consume_token(VARNAME);
        break;
      case PFLAGS:
        // PFLAGS += (-I DIRECTORY)*  -> record each include path.
        jj_consume_token(PFLAGS);
        jj_consume_token(ADD);
        label_2:
        while (true) {
          switch ((jj_ntk==-1)?jj_ntk():jj_ntk) {
          case INCLUDEFLAG:
            ;
            break;
          default:
            jj_la1[1] = jj_gen;
            break label_2;
          }
          jj_consume_token(INCLUDEFLAG);
          componentName = jj_consume_token(DIRECTORY);
          _file.paths.add(componentName.image.split("/"));
        }
        break;
      default:
        jj_la1[2] = jj_gen;
        jj_consume_token(-1);
        throw new ParseException();
      }
    }
  }

  /** Generated Token Manager. */
  public ParserTokenManager token_source;
  SimpleCharStream jj_input_stream;
  /** Current token. */
  public Token token;
  /** Next token. */
  public Token jj_nt;
  // Kind of the next token, or -1 if not yet fetched.
  private int jj_ntk;
  // Generation counter used by error reporting (generateParseException).
  private int jj_gen;
  // Per-choice-point generation snapshots.
  final private int[] jj_la1 = new int[3];
  // Bit masks of the token kinds expected at each choice point.
  static private int[] jj_la1_0;
  static {
    jj_la1_init_0();
  }
  private static void jj_la1_init_0() {
    jj_la1_0 = new int[] {0x860,0x200,0x860,};
  }

  /** Constructor with InputStream. */
  public Parser(java.io.InputStream stream) {
    this(stream, null);
  }
  /** Constructor with InputStream and supplied encoding */
  public Parser(java.io.InputStream stream, String encoding) {
    try {
      jj_input_stream = new SimpleCharStream(stream, encoding, 1, 1);
    } catch(java.io.UnsupportedEncodingException e) {
      throw new RuntimeException(e);
    }
    token_source = new ParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream) {
    ReInit(stream, null);
  }
  /** Reinitialise. */
  public void ReInit(java.io.InputStream stream, String encoding) {
    try {
      jj_input_stream.ReInit(stream, encoding, 1, 1);
    } catch(java.io.UnsupportedEncodingException e) {
      throw new RuntimeException(e);
    }
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  /** Constructor. */
  public Parser(java.io.Reader stream) {
    jj_input_stream = new SimpleCharStream(stream, 1, 1);
    token_source = new ParserTokenManager(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(java.io.Reader stream) {
    jj_input_stream.ReInit(stream, 1, 1);
    token_source.ReInit(jj_input_stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  /** Constructor with generated Token Manager. */
  public Parser(ParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  /** Reinitialise. */
  public void ReInit(ParserTokenManager tm) {
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    for (int i = 0; i < 3; i++) jj_la1[i] = -1;
  }

  // Consumes the next token if it has the expected kind; otherwise restores
  // the previous token, records the expected kind, and throws.
  private Token jj_consume_token(int kind) throws ParseException {
    Token oldToken;
    if ((oldToken = token).next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    if (token.kind == kind) {
      jj_gen++;
      return token;
    }
    token = oldToken;
    jj_kind = kind;
    throw generateParseException();
  }

  /** Get the next Token. */
  final public Token getNextToken() {
    if (token.next != null) token = token.next;
    else token = token.next = token_source.getNextToken();
    jj_ntk = -1;
    jj_gen++;
    return token;
  }

  /** Get the specific Token. */
  final public Token getToken(int index) {
    Token t = token;
    for (int i = 0; i < index; i++) {
      if (t.next != null) t = t.next;
      else t = t.next = token_source.getNextToken();
    }
    return t;
  }

  // Returns the kind of the next token, fetching it lazily.
  private int jj_ntk() {
    if ((jj_nt=token.next) == null)
      return (jj_ntk = (token.next=token_source.getNextToken()).kind);
    else
      return (jj_ntk = jj_nt.kind);
  }

  private java.util.List<int[]> jj_expentries = new java.util.ArrayList<int[]>();
  private int[] jj_expentry;
  private int jj_kind = -1;

  /** Generate ParseException. */
  public ParseException generateParseException() {
    jj_expentries.clear();
    // One flag per token kind (14 kinds in this grammar).
    boolean[] la1tokens = new boolean[14];
    if (jj_kind >= 0) {
      la1tokens[jj_kind] = true;
      jj_kind = -1;
    }
    // Mark every kind that was expected at a choice point reached in the
    // current generation.
    for (int i = 0; i < 3; i++) {
      if (jj_la1[i] == jj_gen) {
        for (int j = 0; j < 32; j++) {
          if ((jj_la1_0[i] & (1<<j)) != 0) {
            la1tokens[j] = true;
          }
        }
      }
    }
    for (int i = 0; i < 14; i++) {
      if (la1tokens[i]) {
        jj_expentry = new int[1];
        jj_expentry[0] = i;
        jj_expentries.add(jj_expentry);
      }
    }
    int[][] exptokseq = new int[jj_expentries.size()][];
    for (int i = 0; i < jj_expentries.size(); i++) {
      exptokseq[i] = jj_expentries.get(i);
    }
    return new ParseException(token, exptokseq, tokenImage);
  }

  /** Enable tracing. */
  final public void enable_tracing() {
  }

  /** Disable tracing. */
  final public void disable_tracing() {
  }

}
/** Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. 
For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2011 Eric Haddad Koenig Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/
package com.all.landownloader;

import static com.all.landownloader.LanDownloadMessageType.CANCEL_TRANSFER;
import static com.all.landownloader.LanDownloadMessageType.CHUNK_TRANSFER;
import static com.all.landownloader.LanDownloadMessageType.PAUSE_TRANSFER;
import static com.all.landownloader.LanDownloadMessageType.RESUME_TRANSFER;
import static com.all.landownloader.LanDownloadMessageType.START_TRANSFER;
import static com.all.landownloader.LanDownloadMessageType.TRACK_REQUEST;
import static com.all.landownloader.LanDownloadMessageType.TRACK_RESPONSE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

import java.io.File;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;

import com.all.downloader.download.ManagedDownloaderConfig;
import com.all.landownloader.LanDownloader.LanDownload;
import com.all.landownloader.LanDownloader.LanDownloadFactory;
import com.all.landownloader.LanDownloader.LanTransfer;
import com.all.landownloader.LanDownloader.LanTransferFactory;
import com.all.shared.download.TrackProvider;
import com.all.shared.model.Track;
import com.all.testing.MockInyectRunner;
import com.all.testing.UnderTest;

/**
 * Unit tests for {@link LanDownloader}: download lifecycle (start, pause,
 * resume, cancel), incoming-message handling (track request/response, chunk
 * and transfer control messages), and downloader priority.
 */
@RunWith(MockInyectRunner.class)
public class TestLanDownloader {

	@UnderTest
	private LanDownloader lanDownloader;
	@Mock
	private ManagedDownloaderConfig config;
	@Mock
	private TrackProvider trackProvider;
	@Mock
	private LanDownloadFactory downloadFactory;
	@Mock
	private LanTransferFactory transferFactory;
	@Mock
	private LanNetworkingService networkingService;
	// Aux mocks
	@Mock
	private LanDownload download;
	@Mock
	private LanTransfer transfer;
	@Mock
	private Track track;
	@Mock
	private File trackFile;

	// Fixture constants shared by all tests.
	private final String downloadId = "00a9ae41a50cfece357f26e786db6fa014af765b";
	private final String email = "seeder@all.com";
	private final String address = "192.168.1.27";

	/**
	 * Wires the common stubbing: config identity, track/file lookup by
	 * downloadId, and the download/transfer factories returning the aux mocks.
	 */
	@Before
	public void setup() {
		// NOTE(review): a previously-built list of hash codes was never used by
		// any test and has been removed (dead local).
		when(config.getUserId()).thenReturn(email);

		// init() runs before the provider/factory stubbing, matching the
		// production startup order this suite was written against.
		lanDownloader.init();

		when(trackProvider.getTrack(downloadId)).thenReturn(track);
		when(trackProvider.getFile(downloadId)).thenReturn(trackFile);
		when(track.getHashcode()).thenReturn(downloadId);

		when(downloadFactory.createDownload(eq(track), anyBoolean())).thenReturn(download);
		when(transferFactory.createTransfer(eq(downloadId), eq(trackFile), anyString())).thenReturn(transfer);
	}

	/** Releases downloader resources after each test (was misspelled "shudown"). */
	@After
	public void shutdown() {
		lanDownloader.shutdown();
	}

	@Test
	public void shouldCancelADownload() throws Exception {
		lanDownloader.download(downloadId);
		lanDownloader.delete(downloadId);

		verify(download).cancel();
	}

	@Test
	public void shouldReturnDownloadStatus() throws Exception {
		// No status before the download is started.
		assertNull(lanDownloader.getStatus(downloadId));

		lanDownloader.download(downloadId);
		lanDownloader.getStatus(downloadId);

		verify(download).getStatus();
	}

	@Test
	public void shouldPauseDownload() throws Exception {
		lanDownloader.download(downloadId);
		lanDownloader.pause(downloadId);

		verify(download).pause();
	}

	@Test
	public void shouldResumeDownload() throws Exception {
		lanDownloader.download(downloadId);
		lanDownloader.resume(downloadId);

		verify(download).resume();
	}

	@Test
	public void shouldAddSeederToCurrentDownloadOnTrackResponse() throws Exception {
		lanDownloader.download(downloadId);

		LanDownloaderMessage request = new LanDownloaderMessage(address, TRACK_RESPONSE, downloadId);
		request.setBody(Boolean.toString(true));
		lanDownloader.onMessage(request);

		verify(download).addSeederResponse(address, true);
	}

	@Test
	public void shouldAddChunkToCurrentDownloadOnChunkTransferred() throws Exception {
		lanDownloader.download(downloadId);

		LanDownloaderMessage request = new LanDownloaderMessage(address, CHUNK_TRANSFER, downloadId);
		String chunk = "encodedChunk";
		request.setBody(chunk);
		lanDownloader.onMessage(request);

		verify(download).addChunk(chunk);
	}

	@Test
	public void shouldStartTransferAsLeecherRequest() throws Exception {
		startTransfer();

		verify(transferFactory).createTransfer(downloadId, trackFile, address);
	}

	/** Simulates an incoming START_TRANSFER message from {@code address}. */
	private void startTransfer() {
		LanDownloaderMessage request = new LanDownloaderMessage(address, START_TRANSFER, downloadId);
		lanDownloader.onMessage(request);
	}

	@Test
	public void shouldPauseCurrentTransferOnRequest() throws Exception {
		startTransfer();

		LanDownloaderMessage request = new LanDownloaderMessage(address, PAUSE_TRANSFER, downloadId);
		lanDownloader.onMessage(request);

		verify(transfer).pause();
	}

	@Test
	public void shouldResumeCurrentTransferOnRequest() throws Exception {
		startTransfer();

		LanDownloaderMessage request = new LanDownloaderMessage(address, RESUME_TRANSFER, downloadId);
		lanDownloader.onMessage(request);

		verify(transfer).resume();
	}

	@Test
	public void shouldCancelCurrentTransferOnRequest() throws Exception {
		startTransfer();

		LanDownloaderMessage request = new LanDownloaderMessage(address, CANCEL_TRANSFER, downloadId);
		lanDownloader.onMessage(request);

		verify(transfer).cancel();
	}

	@Test
	public void shouldGetDownloaderPriority() throws Exception {
		Integer priority = 0;
		when(config.getDownloaderPriority(LanDownloader.PRIORITY_KEY)).thenReturn(priority);

		assertEquals(priority, Integer.valueOf(lanDownloader.getDownloaderPriority()));
	}

	@Test
	public void shouldFindSourcesAndThenStart() throws Exception {
		lanDownloader.findSources(downloadId);
		lanDownloader.download(downloadId);

		verify(downloadFactory).createDownload(track, false);
		verify(download).start();
	}

	@Test(expected = IllegalStateException.class)
	public void shouldNotFindSourcesIfDownloadInProgress() throws Exception {
		lanDownloader.download(downloadId);
		lanDownloader.findSources(downloadId);
	}

	@Test
	public void shouldRespondToTrackRequestIfHasTrack() throws Exception {
		LanDownloaderMessage request = new LanDownloaderMessage(address, TRACK_REQUEST, downloadId);
		lanDownloader.onMessage(request);

		verify(networkingService).sendTo(any(LanDownloaderMessage.class), eq(address));
	}

	@Test
	public void shouldRespondTrackRequestNegativelyIfResponseIsNotOptional() throws Exception {
		LanDownloaderMessage request = new LanDownloaderMessage(address, TRACK_REQUEST, downloadId);
		request.setBody(Boolean.toString(true));
		lanDownloader.onMessage(request);

		verify(networkingService).sendTo(any(LanDownloaderMessage.class), eq(address));
	}

}
/*
 * Copyright 2001-2014 Aspose Pty Ltd. All Rights Reserved.
 *
 * This file is part of Aspose.Words. The source code in this file
 * is only intended as a supplement to the documentation, and is provided
 * "as is", without warranty of any kind, either expressed or implied.
 */
package com.aspose.words.examples.viewers_visualizers.document_explorer;

import com.aspose.words.*;

import javax.swing.*;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
import javax.swing.tree.TreePath;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

/**
 * Base class used to provide GUI representation for document nodes.
 */
public class Item {
    // The document node this item wraps.
    private Node mNode;
    // Lazily-built tree node for the explorer JTree (see getTreeNode()).
    private DefaultMutableTreeNode mTreeNode;
    // Lazily-loaded icon (see getIcon()).
    private ImageIcon mIcon;
    // Reflection caches populated once in the static initializer below.
    private static ArrayList<Field> mControlCharFields;
    private static Map<Integer, String> mNodeTypes;
    private static Map<Integer, String> mHeaderFooterTypes;
    // Maps node-type string -> Item subclass name for the createItem factory.
    private static Map<String, String> mItemSet;
    // NOTE(review): raw ArrayList; elements appear to be icon-name strings.
    private static ArrayList mIconNames = new ArrayList();

    /**
     * Creates Item for the supplied document node.
     */
    public Item(Node node) {
        mNode = node;
    }

    /**
     * Returns the node in the document that this Item represents.
     */
    public Node getNode() {
        return mNode;
    }

    /**
     * The display name for this Item. Can be customized by overriding this method in inheriting classes.
     */
    public String getName() throws Exception {
        return getNodeTypeString(mNode);
    }

    /**
     * The text of the corresponding document node, with control characters
     * rewritten into a human readable form.
     */
    public String getText() throws Exception {
        String text = mNode.getText();

        // Most control characters are converted to human readable form.
        // E.g. [!PageBreak!], [!Cell!], etc.
        for (Field fieldInfo : mControlCharFields) {
            if (fieldInfo.getType() == char.class && Modifier.isStatic(fieldInfo.getModifiers())) {
                Character ch = fieldInfo.getChar(null);

                // Represent a paragraph break using the special formatting marker. This makes the text easier to read.
                if (fieldInfo.getName().equals("PARAGRAPH_BREAK_CHAR"))
                    // JTextArea lines are separated using simple "\n" character and not using system independent new line character.
                    text = text.replace(ch.toString(), "?" + "\n");
                else
                    // E.g. CELL_CHAR -> [!CELL!]
                    text = text.replace(ch.toString(), java.text.MessageFormat.format("[!{0}!]", fieldInfo.getName().replace("_CHAR", "")));
            }
        }

        // All break chars should be supplemented with line feeds
        text = text.replace("BREAK!]", "BREAK!]\n");

        return text;
    }

    /**
     * Creates a TreeNode for this item to be displayed in the Document Explorer TreeView control.
     * A "#dummy" child is added for composite nodes so the tree shows an
     * expand handle; the real children are loaded in onExpand().
     */
    public DefaultMutableTreeNode getTreeNode() throws Exception {
        if (mTreeNode == null) {
            mTreeNode = new DefaultMutableTreeNode(this);
            if (!mIconNames.contains(getIconName())) {
                mIconNames.add(getIconName());
            }
            if (mNode instanceof CompositeNode && ((CompositeNode) mNode).getChildNodes().getCount() > 0) {
                mTreeNode.add(new DefaultMutableTreeNode("#dummy"));
            }
        }
        return mTreeNode;
    }

    /**
     * Returns the icon to display in the Document Explorer TreeView control.
     * Falls back to the generic "Node" icon when no type-specific icon exists.
     */
    public ImageIcon getIcon() throws Exception {
        if (mIcon == null) {
            mIcon = loadIcon(getIconName());
            if (mIcon == null)
                mIcon = loadIcon("Node");
        }
        return mIcon;
    }

    /**
     * The icon for this node can be customized by overriding this property in the inheriting classes.
     * The name represents name of .ico file without extension located in the Icons folder of the project.
     */
    protected String getIconName() throws Exception {
        return getClass().getSimpleName().replace("Item", "");
    }

    /**
     * Provides lazy on-expand loading of underlying tree nodes: replaces the
     * "#dummy" placeholder with one child Item per document child node.
     */
    public void onExpand() throws Exception {
        if ("#dummy".equals(getTreeNode().getFirstChild().toString())) {
            getTreeNode().removeAllChildren();
            Globals.mTreeModel.reload(getTreeNode());

            for (Object o : ((CompositeNode) mNode).getChildNodes()) {
                Node n = (Node) o;
                getTreeNode().add(Item.createItem(n).getTreeNode());
            }
        }
    }

    /**
     * Loads and returns an icon from the assembly resource stream.
     * Returns null when the resource is missing.
     */
    private ImageIcon loadIcon(String iconName) {
        java.net.URL imgURL = MainForm.class.getResource("images/" + iconName + ".gif");
        if (imgURL != null)
            return new ImageIcon(imgURL);
        else
            return null;
    }

    /**
     * Removes this node from the document and the tree, then reselects the
     * tree root.
     */
    public void remove() throws Exception {
        if (this.isRemovable()) {
            mNode.remove();

            TreeNode parent = mTreeNode.getParent();
            mTreeNode.removeFromParent();
            Globals.mTreeModel.reload(parent);

            TreePath path = new TreePath(Globals.mRootNode);
            Globals.mTree.setSelectionPath(path);
        }
    }

    /**
     * Returns if this node can be removed from the document. Some nodes such as the last paragraph in the
     * document cannot be removed.
     */
    public boolean isRemovable() {
        return true;
    }

    /**
     * Static ctor. Populates the reflection caches used by getText(),
     * createItem() and the type-name lookups.
     */
    static {
        // Populate a list of node types along with their class implementation.
        mItemSet = new HashMap<String, String>();
        for (Class itemClass : DocumentItems.class.getDeclaredClasses()) {
            try {
                String nodeTypeString = (String) itemClass.getField("NODE_TYPE_STRING").get(null);
                mItemSet.put(nodeTypeString, itemClass.getName());
            } catch (Exception e) {
                // IllegalAccessException, NoSuchFieldException or NoSuchMethodException - skip such exceptions if there are any.
            }
        }

        // Fill a list containing the information of each control char.
        mControlCharFields = new ArrayList<Field>();
        Field[] fields = ControlChar.class.getFields();
        for (Field fieldInfo : fields) {
            if (fieldInfo.getType() == char.class && Modifier.isStatic(fieldInfo.getModifiers())) {
                // Spaces are left as-is in getText(), so skip SPACE_CHAR.
                if (!fieldInfo.getName().equals("SPACE_CHAR"))
                    mControlCharFields.add(fieldInfo);
            }
        }

        // Map node type integer values to their equivalent string name.
        mNodeTypes = new HashMap<Integer, String>();
        Field[] nodeTypefields = NodeType.class.getFields();
        for (Field fieldInfo : nodeTypefields) {
            if (fieldInfo.getType() == int.class && Modifier.isStatic(fieldInfo.getModifiers())) {
                try {
                    int integerValue = fieldInfo.getInt(null);
                    mNodeTypes.put(integerValue, fieldInfo.getName());
                } catch (IllegalAccessException e) {
                    // Skip any invalid fields.
                }
            }
        }

        // Maps header/footer type integer values to string names.
        mHeaderFooterTypes = new HashMap<Integer, String>();
        fields = HeaderFooterType.class.getFields();
        for (Field fieldInfo : fields) {
            if (fieldInfo.getType() == int.class && Modifier.isStatic(fieldInfo.getModifiers())) {
                try {
                    int integerValue = fieldInfo.getInt(null);
                    mHeaderFooterTypes.put(integerValue, fieldInfo.getName());
                } catch (IllegalAccessException e) {
                    // Skip any invalid fields.
                }
            }
        }
    }

    /**
     * Item class factory implementation: instantiates the Item subclass
     * registered for the node's type string, or a plain Item when none is.
     */
    public static Item createItem(Node node) throws ClassNotFoundException, NoSuchMethodException,
            IllegalAccessException, InvocationTargetException, InstantiationException {
        String typeName = getNodeTypeString(node);

        if (mItemSet.containsKey(typeName))
            // NOTE(review): the subclasses are inner classes of DocumentItems,
            // hence the DocumentItems.class first constructor parameter; a null
            // enclosing instance is passed — confirm the inner classes are
            // effectively static in their behavior.
            return (Item) Class.forName(mItemSet.get(typeName)).
                    getConstructor(DocumentItems.class, Node.class).
                    newInstance(null, node);
        else
            return new Item(node);
    }

    /**
     * Object.toString method used by Tree.
     */
    public String toString() {
        // Introduced non-checked RuntimeException on purpose to not change Object.toString() signature
        try {
            return getName();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Convert numerical representation of the node type to string.
*/ private static String getNodeTypeString(Node node) { int nodeType = node.getNodeType(); if (mNodeTypes.containsKey(nodeType)) return mNodeTypes.get(nodeType); else return ""; } /** * Convert numerical representation of HeaderFooter integer type to string. */ protected static String getHeaderFooterTypeAsString(HeaderFooter headerFooter) throws Exception { int headerFooterType = headerFooter.getHeaderFooterType(); if (mHeaderFooterTypes.containsKey(headerFooterType)) return mHeaderFooterTypes.get(headerFooterType); else return ""; } }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto package com.google.cloud.compute.v1; /** * * * <pre> * </pre> * * Protobuf type {@code google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse} */ public final class InstancesGetEffectiveFirewallsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) InstancesGetEffectiveFirewallsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use InstancesGetEffectiveFirewallsResponse.newBuilder() to construct. 
private InstancesGetEffectiveFirewallsResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InstancesGetEffectiveFirewallsResponse() { firewallPolicys_ = java.util.Collections.emptyList(); firewalls_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InstancesGetEffectiveFirewallsResponse(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private InstancesGetEffectiveFirewallsResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case -2117002342: { if (!((mutable_bitField0_ & 0x00000002) != 0)) { firewalls_ = new java.util.ArrayList<com.google.cloud.compute.v1.Firewall>(); mutable_bitField0_ |= 0x00000002; } firewalls_.add( input.readMessage( com.google.cloud.compute.v1.Firewall.parser(), extensionRegistry)); break; } case -1007080942: { if (!((mutable_bitField0_ & 0x00000001) != 0)) { firewallPolicys_ = new java.util.ArrayList< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy>(); mutable_bitField0_ |= 0x00000001; } firewallPolicys_.add( input.readMessage( com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.parser(), extensionRegistry)); break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { if (((mutable_bitField0_ & 0x00000002) != 0)) { firewalls_ = java.util.Collections.unmodifiableList(firewalls_); } if (((mutable_bitField0_ & 0x00000001) != 0)) { firewallPolicys_ = java.util.Collections.unmodifiableList(firewallPolicys_); } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstancesGetEffectiveFirewallsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstancesGetEffectiveFirewallsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse.class, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse.Builder.class); } public static final int FIREWALL_POLICYS_FIELD_NUMBER = 410985794; private java.util.List< com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> firewallPolicys_; /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ @java.lang.Override public java.util.List< com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> getFirewallPolicysList() { return firewallPolicys_; } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ @java.lang.Override public java.util.List< ? extends com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder> getFirewallPolicysOrBuilderList() { return firewallPolicys_; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ @java.lang.Override public int getFirewallPolicysCount() { return firewallPolicys_.size(); } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ @java.lang.Override public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy getFirewallPolicys(int index) { return firewallPolicys_.get(index); } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ @java.lang.Override public com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder getFirewallPolicysOrBuilder(int index) { return firewallPolicys_.get(index); } public static final int FIREWALLS_FIELD_NUMBER = 272245619; private java.util.List<com.google.cloud.compute.v1.Firewall> firewalls_; /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ @java.lang.Override public java.util.List<com.google.cloud.compute.v1.Firewall> getFirewallsList() { return firewalls_; } /** * * * <pre> * Effective firewalls on the instance. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.compute.v1.FirewallOrBuilder> getFirewallsOrBuilderList() { return firewalls_; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ @java.lang.Override public int getFirewallsCount() { return firewalls_.size(); } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ @java.lang.Override public com.google.cloud.compute.v1.Firewall getFirewalls(int index) { return firewalls_.get(index); } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ @java.lang.Override public com.google.cloud.compute.v1.FirewallOrBuilder getFirewallsOrBuilder(int index) { return firewalls_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < firewalls_.size(); i++) { output.writeMessage(272245619, firewalls_.get(i)); } for (int i = 0; i < firewallPolicys_.size(); i++) { output.writeMessage(410985794, firewallPolicys_.get(i)); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < firewalls_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(272245619, firewalls_.get(i)); } for (int i = 0; i < firewallPolicys_.size(); i++) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize( 410985794, firewallPolicys_.get(i)); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse)) { return super.equals(obj); } com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse other = (com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) obj; if (!getFirewallPolicysList().equals(other.getFirewallPolicysList())) return false; if (!getFirewallsList().equals(other.getFirewallsList())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getFirewallPolicysCount() > 0) { hash = (37 * hash) + FIREWALL_POLICYS_FIELD_NUMBER; hash = (53 * hash) + getFirewallPolicysList().hashCode(); } if (getFirewallsCount() > 0) { hash = (37 * hash) + FIREWALLS_FIELD_NUMBER; hash = (53 * hash) + getFirewallsList().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public 
static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * </pre> * * Protobuf type {@code google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstancesGetEffectiveFirewallsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return 
com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstancesGetEffectiveFirewallsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse.class, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse.Builder.class); } // Construct using // com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFirewallPolicysFieldBuilder(); getFirewallsFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); if (firewallPolicysBuilder_ == null) { firewallPolicys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); } else { firewallPolicysBuilder_.clear(); } if (firewallsBuilder_ == null) { firewalls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); } else { firewallsBuilder_.clear(); } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_InstancesGetEffectiveFirewallsResponse_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse getDefaultInstanceForType() { return com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse build() { com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse buildPartial() { com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse result = new com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse(this); int from_bitField0_ = bitField0_; if (firewallPolicysBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { firewallPolicys_ = java.util.Collections.unmodifiableList(firewallPolicys_); bitField0_ = (bitField0_ & ~0x00000001); } result.firewallPolicys_ = firewallPolicys_; } else { result.firewallPolicys_ = firewallPolicysBuilder_.build(); } if (firewallsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { firewalls_ = java.util.Collections.unmodifiableList(firewalls_); bitField0_ = (bitField0_ & ~0x00000002); } result.firewalls_ = firewalls_; } else { result.firewalls_ = firewallsBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) { return mergeFrom( 
(com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse other) { if (other == com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse .getDefaultInstance()) return this; if (firewallPolicysBuilder_ == null) { if (!other.firewallPolicys_.isEmpty()) { if (firewallPolicys_.isEmpty()) { firewallPolicys_ = other.firewallPolicys_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureFirewallPolicysIsMutable(); firewallPolicys_.addAll(other.firewallPolicys_); } onChanged(); } } else { if (!other.firewallPolicys_.isEmpty()) { if (firewallPolicysBuilder_.isEmpty()) { firewallPolicysBuilder_.dispose(); firewallPolicysBuilder_ = null; firewallPolicys_ = other.firewallPolicys_; bitField0_ = (bitField0_ & ~0x00000001); firewallPolicysBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? getFirewallPolicysFieldBuilder() : null; } else { firewallPolicysBuilder_.addAllMessages(other.firewallPolicys_); } } } if (firewallsBuilder_ == null) { if (!other.firewalls_.isEmpty()) { if (firewalls_.isEmpty()) { firewalls_ = other.firewalls_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureFirewallsIsMutable(); firewalls_.addAll(other.firewalls_); } onChanged(); } } else { if (!other.firewalls_.isEmpty()) { if (firewallsBuilder_.isEmpty()) { firewallsBuilder_.dispose(); firewallsBuilder_ = null; firewalls_ = other.firewalls_; bitField0_ = (bitField0_ & ~0x00000002); firewallsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getFirewallsFieldBuilder() : null; } else { firewallsBuilder_.addAllMessages(other.firewalls_); } } } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private int bitField0_; private java.util.List< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> firewallPolicys_ = java.util.Collections.emptyList(); private void ensureFirewallPolicysIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { firewallPolicys_ = new java.util.ArrayList< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy>( firewallPolicys_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Builder, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder> firewallPolicysBuilder_; /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public java.util.List< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> getFirewallPolicysList() { if (firewallPolicysBuilder_ == null) { return java.util.Collections.unmodifiableList(firewallPolicys_); } else { return firewallPolicysBuilder_.getMessageList(); } } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public int getFirewallPolicysCount() { if (firewallPolicysBuilder_ == null) { return firewallPolicys_.size(); } else { return firewallPolicysBuilder_.getCount(); } } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy getFirewallPolicys(int index) { if (firewallPolicysBuilder_ == null) { return firewallPolicys_.get(index); } else { return firewallPolicysBuilder_.getMessage(index); } } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder setFirewallPolicys( int index, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy value) { if (firewallPolicysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallPolicysIsMutable(); firewallPolicys_.set(index, value); onChanged(); } else { firewallPolicysBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder setFirewallPolicys( int index, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder builderForValue) { if (firewallPolicysBuilder_ == null) { ensureFirewallPolicysIsMutable(); firewallPolicys_.set(index, builderForValue.build()); onChanged(); } else { firewallPolicysBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder addFirewallPolicys( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy value) { if (firewallPolicysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallPolicysIsMutable(); firewallPolicys_.add(value); onChanged(); } else { firewallPolicysBuilder_.addMessage(value); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder addFirewallPolicys( int index, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy value) { if (firewallPolicysBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallPolicysIsMutable(); firewallPolicys_.add(index, value); onChanged(); } else { firewallPolicysBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder addFirewallPolicys( com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder builderForValue) { if (firewallPolicysBuilder_ == null) { ensureFirewallPolicysIsMutable(); firewallPolicys_.add(builderForValue.build()); onChanged(); } else { firewallPolicysBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder addFirewallPolicys( int index, com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder builderForValue) { if (firewallPolicysBuilder_ == null) { ensureFirewallPolicysIsMutable(); firewallPolicys_.add(index, builderForValue.build()); onChanged(); } else { firewallPolicysBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder addAllFirewallPolicys( java.lang.Iterable< ? extends com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy> values) { if (firewallPolicysBuilder_ == null) { ensureFirewallPolicysIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, firewallPolicys_); onChanged(); } else { firewallPolicysBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder clearFirewallPolicys() { if (firewallPolicysBuilder_ == null) { firewallPolicys_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { firewallPolicysBuilder_.clear(); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public Builder removeFirewallPolicys(int index) { if (firewallPolicysBuilder_ == null) { ensureFirewallPolicysIsMutable(); firewallPolicys_.remove(index); onChanged(); } else { firewallPolicysBuilder_.remove(index); } return this; } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder getFirewallPolicysBuilder(int index) { return getFirewallPolicysFieldBuilder().getBuilder(index); } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder getFirewallPolicysOrBuilder(int index) { if (firewallPolicysBuilder_ == null) { return firewallPolicys_.get(index); } else { return firewallPolicysBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public java.util.List< ? extends com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder> getFirewallPolicysOrBuilderList() { if (firewallPolicysBuilder_ != null) { return firewallPolicysBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(firewallPolicys_); } } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder addFirewallPolicysBuilder() { return getFirewallPolicysFieldBuilder() .addBuilder( com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .getDefaultInstance()); } /** * * * <pre> * Effective firewalls from firewall policies. 
* </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .Builder addFirewallPolicysBuilder(int index) { return getFirewallPolicysFieldBuilder() .addBuilder( index, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy .getDefaultInstance()); } /** * * * <pre> * Effective firewalls from firewall policies. * </pre> * * <code> * repeated .google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy firewall_policys = 410985794; * </code> */ public java.util.List< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Builder> getFirewallPolicysBuilderList() { return getFirewallPolicysFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Builder, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder> getFirewallPolicysFieldBuilder() { if (firewallPolicysBuilder_ == null) { firewallPolicysBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Builder, com.google.cloud.compute.v1 .InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicyOrBuilder>( firewallPolicys_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); firewallPolicys_ = null; } return firewallPolicysBuilder_; } private java.util.List<com.google.cloud.compute.v1.Firewall> firewalls_ = java.util.Collections.emptyList(); private void 
ensureFirewallsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { firewalls_ = new java.util.ArrayList<com.google.cloud.compute.v1.Firewall>(firewalls_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Firewall, com.google.cloud.compute.v1.Firewall.Builder, com.google.cloud.compute.v1.FirewallOrBuilder> firewallsBuilder_; /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public java.util.List<com.google.cloud.compute.v1.Firewall> getFirewallsList() { if (firewallsBuilder_ == null) { return java.util.Collections.unmodifiableList(firewalls_); } else { return firewallsBuilder_.getMessageList(); } } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public int getFirewallsCount() { if (firewallsBuilder_ == null) { return firewalls_.size(); } else { return firewallsBuilder_.getCount(); } } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public com.google.cloud.compute.v1.Firewall getFirewalls(int index) { if (firewallsBuilder_ == null) { return firewalls_.get(index); } else { return firewallsBuilder_.getMessage(index); } } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder setFirewalls(int index, com.google.cloud.compute.v1.Firewall value) { if (firewallsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallsIsMutable(); firewalls_.set(index, value); onChanged(); } else { firewallsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Effective firewalls on the instance. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder setFirewalls( int index, com.google.cloud.compute.v1.Firewall.Builder builderForValue) { if (firewallsBuilder_ == null) { ensureFirewallsIsMutable(); firewalls_.set(index, builderForValue.build()); onChanged(); } else { firewallsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder addFirewalls(com.google.cloud.compute.v1.Firewall value) { if (firewallsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallsIsMutable(); firewalls_.add(value); onChanged(); } else { firewallsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder addFirewalls(int index, com.google.cloud.compute.v1.Firewall value) { if (firewallsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureFirewallsIsMutable(); firewalls_.add(index, value); onChanged(); } else { firewallsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder addFirewalls(com.google.cloud.compute.v1.Firewall.Builder builderForValue) { if (firewallsBuilder_ == null) { ensureFirewallsIsMutable(); firewalls_.add(builderForValue.build()); onChanged(); } else { firewallsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls on the instance. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder addFirewalls( int index, com.google.cloud.compute.v1.Firewall.Builder builderForValue) { if (firewallsBuilder_ == null) { ensureFirewallsIsMutable(); firewalls_.add(index, builderForValue.build()); onChanged(); } else { firewallsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder addAllFirewalls( java.lang.Iterable<? extends com.google.cloud.compute.v1.Firewall> values) { if (firewallsBuilder_ == null) { ensureFirewallsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, firewalls_); onChanged(); } else { firewallsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder clearFirewalls() { if (firewallsBuilder_ == null) { firewalls_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { firewallsBuilder_.clear(); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public Builder removeFirewalls(int index) { if (firewallsBuilder_ == null) { ensureFirewallsIsMutable(); firewalls_.remove(index); onChanged(); } else { firewallsBuilder_.remove(index); } return this; } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public com.google.cloud.compute.v1.Firewall.Builder getFirewallsBuilder(int index) { return getFirewallsFieldBuilder().getBuilder(index); } /** * * * <pre> * Effective firewalls on the instance. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public com.google.cloud.compute.v1.FirewallOrBuilder getFirewallsOrBuilder(int index) { if (firewallsBuilder_ == null) { return firewalls_.get(index); } else { return firewallsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public java.util.List<? extends com.google.cloud.compute.v1.FirewallOrBuilder> getFirewallsOrBuilderList() { if (firewallsBuilder_ != null) { return firewallsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(firewalls_); } } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public com.google.cloud.compute.v1.Firewall.Builder addFirewallsBuilder() { return getFirewallsFieldBuilder() .addBuilder(com.google.cloud.compute.v1.Firewall.getDefaultInstance()); } /** * * * <pre> * Effective firewalls on the instance. * </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public com.google.cloud.compute.v1.Firewall.Builder addFirewallsBuilder(int index) { return getFirewallsFieldBuilder() .addBuilder(index, com.google.cloud.compute.v1.Firewall.getDefaultInstance()); } /** * * * <pre> * Effective firewalls on the instance. 
* </pre> * * <code>repeated .google.cloud.compute.v1.Firewall firewalls = 272245619;</code> */ public java.util.List<com.google.cloud.compute.v1.Firewall.Builder> getFirewallsBuilderList() { return getFirewallsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Firewall, com.google.cloud.compute.v1.Firewall.Builder, com.google.cloud.compute.v1.FirewallOrBuilder> getFirewallsFieldBuilder() { if (firewallsBuilder_ == null) { firewallsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.compute.v1.Firewall, com.google.cloud.compute.v1.Firewall.Builder, com.google.cloud.compute.v1.FirewallOrBuilder>( firewalls_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); firewalls_ = null; } return firewallsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse) private static final com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse(); } public static com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InstancesGetEffectiveFirewallsResponse> PARSER = new com.google.protobuf.AbstractParser<InstancesGetEffectiveFirewallsResponse>() { @java.lang.Override public InstancesGetEffectiveFirewallsResponse parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new InstancesGetEffectiveFirewallsResponse(input, extensionRegistry); } }; public static com.google.protobuf.Parser<InstancesGetEffectiveFirewallsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InstancesGetEffectiveFirewallsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.InstancesGetEffectiveFirewallsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.notifications.impl.formatters; import com.evolveum.midpoint.notifications.impl.NotificationFuctionsImpl; import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.path.IdItemPathSegment; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.path.ItemPathSegment; import com.evolveum.midpoint.prism.path.NameItemPathSegment; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.repo.api.RepositoryService; import com.evolveum.midpoint.schema.GetOperationOptions; import com.evolveum.midpoint.schema.SelectorOptions; import com.evolveum.midpoint.schema.constants.SchemaConstants; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ValueDisplayUtil; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.util.PrettyPrinter; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import 
com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowAssociationType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import org.apache.commons.lang.Validate; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Component; import javax.xml.namespace.QName; import java.util.*; /** * @author mederly */ @Component public class TextFormatter { @Autowired(required = true) @Qualifier("cacheRepositoryService") private transient RepositoryService cacheRepositoryService; private static final ResourceBundle RESOURCE_BUNDLE = ResourceBundle.getBundle( SchemaConstants.SCHEMA_LOCALIZATION_PROPERTIES_RESOURCE_BASE_PATH); private static final Trace LOGGER = TraceManager.getTrace(TextFormatter.class); public String formatObjectModificationDelta(ObjectDelta<? extends Objectable> objectDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { return formatObjectModificationDelta(objectDelta, hiddenPaths, showOperationalAttributes, null, null); } // objectOld and objectNew are used for explaining changed container values, e.g. assignment[1]/tenantRef (see MID-2047) // if null, they are ignored public String formatObjectModificationDelta(ObjectDelta<? 
extends Objectable> objectDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes, PrismObject objectOld, PrismObject objectNew) { Validate.notNull(objectDelta, "objectDelta is null"); Validate.isTrue(objectDelta.isModify(), "objectDelta is not a modification delta"); PrismObjectDefinition objectDefinition; if (objectNew != null && objectNew.getDefinition() != null) { objectDefinition = objectNew.getDefinition(); } else if (objectOld != null && objectOld.getDefinition() != null) { objectDefinition = objectOld.getDefinition(); } else { objectDefinition = null; } if (LOGGER.isTraceEnabled()) { LOGGER.trace("formatObjectModificationDelta: objectDelta = " + objectDelta.debugDump() + ", hiddenPaths = " + PrettyPrinter.prettyPrint(hiddenPaths)); } StringBuilder retval = new StringBuilder(); List<ItemDelta> toBeDisplayed = filterAndOrderItemDeltas(objectDelta, hiddenPaths, showOperationalAttributes); for (ItemDelta itemDelta : toBeDisplayed) { retval.append(" - "); retval.append(getItemDeltaLabel(itemDelta, objectDefinition)); retval.append(":\n"); formatItemDeltaContent(retval, itemDelta, hiddenPaths, showOperationalAttributes); } explainPaths(retval, toBeDisplayed, objectDefinition, objectOld, objectNew, hiddenPaths, showOperationalAttributes); return retval.toString(); } private void explainPaths(StringBuilder sb, List<ItemDelta> deltas, PrismObjectDefinition objectDefinition, PrismObject objectOld, PrismObject objectNew, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { if (objectOld == null && objectNew == null) { return; // no data - no point in trying } boolean first = true; List<ItemPath> alreadyExplained = new ArrayList<>(); for (ItemDelta itemDelta : deltas) { ItemPath pathToExplain = getPathToExplain(itemDelta); if (pathToExplain == null || ItemPath.containsSubpathOrEquivalent(alreadyExplained, pathToExplain)) { continue; // null or already processed } PrismObject source = null; Object item = null; if (objectNew != null) { item = 
objectNew.find(pathToExplain); source = objectNew; } if (item == null && objectOld != null) { item = objectOld.find(pathToExplain); source = objectOld; } if (item == null) { LOGGER.warn("Couldn't find {} in {} nor {}, no explanation could be created.", new Object[] {pathToExplain, objectNew, objectOld}); continue; } if (first) { sb.append("\nNotes:\n"); first = false; } String label = getItemPathLabel(pathToExplain, itemDelta.getDefinition(), objectDefinition); // the item should be a PrismContainerValue if (item instanceof PrismContainerValue) { sb.append(" - ").append(label).append(":\n"); formatContainerValue(sb, " ", (PrismContainerValue) item, false, hiddenPaths, showOperationalAttributes); } else { LOGGER.warn("{} in {} was expected to be a PrismContainerValue; it is {} instead", new Object[]{pathToExplain, source, item.getClass()}); if (item instanceof PrismContainer) { formatPrismContainer(sb, " ", (PrismContainer) item, false, hiddenPaths, showOperationalAttributes); } else if (item instanceof PrismReference) { formatPrismReference(sb, " ", (PrismReference) item, false); } else if (item instanceof PrismProperty) { formatPrismProperty(sb, " ", (PrismProperty) item); } else { sb.append("Unexpected item: ").append(item).append("\n"); } } alreadyExplained.add(pathToExplain); } } private void formatItemDeltaContent(StringBuilder sb, ItemDelta itemDelta, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { formatItemDeltaValues(sb, "ADD", itemDelta.getValuesToAdd(), false, hiddenPaths, showOperationalAttributes); formatItemDeltaValues(sb, "DELETE", itemDelta.getValuesToDelete(), true, hiddenPaths, showOperationalAttributes); formatItemDeltaValues(sb, "REPLACE", itemDelta.getValuesToReplace(), false, hiddenPaths, showOperationalAttributes); } private void formatItemDeltaValues(StringBuilder sb, String type, Collection<? 
extends PrismValue> values, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { if (values != null) { for (PrismValue prismValue : values) { sb.append(" - " + type + ": "); String prefix = " "; formatPrismValue(sb, prefix, prismValue, mightBeRemoved, hiddenPaths, showOperationalAttributes); if (!(prismValue instanceof PrismContainerValue)) { // container values already end with newline sb.append("\n"); } } } } // todo - should each hiddenAttribute be prefixed with something like F_ATTRIBUTE? Currently it should not be. public String formatAccountAttributes(ShadowType shadowType, List<ItemPath> hiddenAttributes, boolean showOperationalAttributes) { Validate.notNull(shadowType, "shadowType is null"); StringBuilder retval = new StringBuilder(); if (shadowType.getAttributes() != null) { formatContainerValue(retval, "", shadowType.getAttributes().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes); } if (shadowType.getCredentials() != null) { formatContainerValue(retval, "", shadowType.getCredentials().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes); } if (shadowType.getActivation() != null) { formatContainerValue(retval, "", shadowType.getActivation().asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes); } if (shadowType.getAssociation() != null) { boolean first = true; for (ShadowAssociationType shadowAssociationType : shadowType.getAssociation()) { if (first) { first = false; retval.append("\n"); } retval.append("Association:\n"); formatContainerValue(retval, " ", shadowAssociationType.asPrismContainerValue(), false, hiddenAttributes, showOperationalAttributes); retval.append("\n"); } } return retval.toString(); } public String formatObject(PrismObject object, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { Validate.notNull(object, "object is null"); StringBuilder retval = new StringBuilder(); formatContainerValue(retval, "", 
object.getValue(), false, hiddenPaths, showOperationalAttributes); return retval.toString(); } private void formatPrismValue(StringBuilder sb, String prefix, PrismValue prismValue, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { if (prismValue instanceof PrismPropertyValue) { sb.append(ValueDisplayUtil.toStringValue((PrismPropertyValue) prismValue)); } else if (prismValue instanceof PrismReferenceValue) { sb.append(formatReferenceValue((PrismReferenceValue) prismValue, mightBeRemoved)); } else if (prismValue instanceof PrismContainerValue) { sb.append("\n"); formatContainerValue(sb, prefix, (PrismContainerValue) prismValue, mightBeRemoved, hiddenPaths, showOperationalAttributes); } else { sb.append("Unexpected PrismValue type: "); sb.append(prismValue); LOGGER.error("Unexpected PrismValue type: " + prismValue.getClass() + ": " + prismValue); } } private void formatContainerValue(StringBuilder sb, String prefix, PrismContainerValue containerValue, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { // sb.append("Container of type " + containerValue.getParent().getDefinition().getTypeName()); // sb.append("\n"); List<Item> toBeDisplayed = filterAndOrderItems(containerValue.getItems(), hiddenPaths, showOperationalAttributes); for (Item item : toBeDisplayed) { if (item instanceof PrismProperty) { formatPrismProperty(sb, prefix, item); } else if (item instanceof PrismReference) { formatPrismReference(sb, prefix, item, mightBeRemoved); } else if (item instanceof PrismContainer) { formatPrismContainer(sb, prefix, item, mightBeRemoved, hiddenPaths, showOperationalAttributes); } else { sb.append("Unexpected Item type: "); sb.append(item); sb.append("\n"); LOGGER.error("Unexpected Item type: " + item.getClass() + ": " + item); } } } private void formatPrismContainer(StringBuilder sb, String prefix, Item item, boolean mightBeRemoved, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) { 
for (PrismContainerValue subContainerValue : ((PrismContainer<? extends Containerable>) item).getValues()) { sb.append(prefix); sb.append(" - "); sb.append(getItemLabel(item)); if (subContainerValue.getId() != null) { sb.append(" #").append(subContainerValue.getId()); } sb.append(":\n"); String prefixSubContainer = prefix + " "; formatContainerValue(sb, prefixSubContainer, subContainerValue, mightBeRemoved, hiddenPaths, showOperationalAttributes); } } private void formatPrismReference(StringBuilder sb, String prefix, Item item, boolean mightBeRemoved) { sb.append(prefix); sb.append(" - "); sb.append(getItemLabel(item)); sb.append(": "); if (item.size() > 1) { for (PrismReferenceValue referenceValue : ((PrismReference) item).getValues()) { sb.append("\n"); sb.append(prefix + " - "); sb.append(formatReferenceValue(referenceValue, mightBeRemoved)); } } else if (item.size() == 1) { sb.append(formatReferenceValue(((PrismReference) item).getValue(0), mightBeRemoved)); } sb.append("\n"); } private void formatPrismProperty(StringBuilder sb, String prefix, Item item) { sb.append(prefix); sb.append(" - "); sb.append(getItemLabel(item)); sb.append(": "); if (item.size() > 1) { for (PrismPropertyValue propertyValue : ((PrismProperty<? extends Object>) item).getValues()) { sb.append("\n"); sb.append(prefix + " - "); sb.append(ValueDisplayUtil.toStringValue(propertyValue)); } } else if (item.size() == 1) { sb.append(ValueDisplayUtil.toStringValue(((PrismProperty<? extends Object>) item).getValue(0))); } sb.append("\n"); } private String formatReferenceValue(PrismReferenceValue value, boolean mightBeRemoved) { OperationResult result = new OperationResult("dummy"); PrismObject<? 
extends ObjectType> object = value.getObject(); if (object == null) { object = getPrismObject(value.getOid(), mightBeRemoved, result); } String qualifier = ""; if (object != null && object.asObjectable() instanceof ShadowType) { ShadowType shadowType = (ShadowType) object.asObjectable(); ResourceType resourceType = shadowType.getResource(); if (resourceType == null) { PrismObject<? extends ObjectType> resource = getPrismObject(shadowType.getResourceRef().getOid(), false, result); if (resource != null) { resourceType = (ResourceType) resource.asObjectable(); } } if (resourceType != null) { qualifier = " on " + resourceType.getName(); } else { qualifier = " on resource " + shadowType.getResourceRef().getOid(); } } if (object != null) { return PolyString.getOrig(object.asObjectable().getName()) + " (" + object.toDebugType() + ")" + qualifier; } else { if (mightBeRemoved) { return "(cannot display the name of " + localPart(value.getTargetType()) + ":" + value.getOid() + ", as it might be already removed)"; } else { return localPart(value.getTargetType()) + ":" + value.getOid(); } } } private PrismObject<? 
extends ObjectType> getPrismObject(String oid, boolean mightBeRemoved, OperationResult result) { try { Collection<SelectorOptions<GetOperationOptions>> options = SelectorOptions.createCollection(GetOperationOptions.createReadOnly()); return cacheRepositoryService.getObject(ObjectType.class, oid, options, result); } catch (ObjectNotFoundException e) { if (!mightBeRemoved) { LoggingUtils.logException(LOGGER, "Couldn't resolve reference when displaying object name within a notification (it might be already removed)", e); } else { } } catch (SchemaException e) { LoggingUtils.logException(LOGGER, "Couldn't resolve reference when displaying object name within a notification", e); } return null; } private String localPartOfType(Item item) { if (item.getDefinition() != null) { return localPart(item.getDefinition().getTypeName()); } else { return null; } } private String localPart(QName qname) { return qname == null ? null : qname.getLocalPart(); } // we call this on filtered list of item deltas - all of they have definition set private String getItemDeltaLabel(ItemDelta itemDelta, PrismObjectDefinition objectDefinition) { return getItemPathLabel(itemDelta.getPath(), itemDelta.getDefinition(), objectDefinition); } private String getItemPathLabel(ItemPath path, Definition deltaDefinition, PrismObjectDefinition objectDefinition) { NameItemPathSegment lastNamedSegment = path.lastNamed(); StringBuilder sb = new StringBuilder(); for (ItemPathSegment segment : path.getSegments()) { if (segment instanceof NameItemPathSegment) { if (sb.length() > 0) { sb.append("/"); } Definition itemDefinition; if (objectDefinition == null) { if (segment == lastNamedSegment) { // definition for last segment is the definition taken from delta itemDefinition = deltaDefinition; // this may be null but we don't care } else { itemDefinition = null; // definitions for previous segments are unknown } } else { // todo we could make this iterative (resolving definitions while walking down the path); but 
    // (tail of a method that begins above this chunk: the enclosing loop walks the segments of an
    // ItemPath and appends a human-readable label for each one to `sb`)
    // ... this is definitely simpler to implement and debug :)
                itemDefinition = objectDefinition.findItemDefinition(path.allUpToIncluding(segment));
            }
            if (itemDefinition != null && itemDefinition.getDisplayName() != null) {
                // prefer the localized display name when the definition provides one
                sb.append(resolve(itemDefinition.getDisplayName()));
            } else {
                // fall back to the raw element name of the path segment
                sb.append(((NameItemPathSegment) segment).getName().getLocalPart());
            }
        } else if (segment instanceof IdItemPathSegment) {
            // container ids are rendered in brackets, e.g. "[42]"
            sb.append("[").append(((IdItemPathSegment) segment).getId()).append("]");
        }
    }
    return sb.toString();
}

/**
 * Resolves a localization key against {@code RESOURCE_BUNDLE}.
 * Returns the key itself when it is null or not present in the bundle.
 */
private String resolve(String key) {
    if (key != null && RESOURCE_BUNDLE.containsKey(key)) {
        return RESOURCE_BUNDLE.getString(key);
    } else {
        return key;
    }
}

// We call this on a filtered list of item deltas - all of them have their definition set.
/**
 * Returns the prefix of the delta's path (up to and including the first {@code [id]} segment)
 * that needs to be explained to the reader, or null when no explanation is needed.
 */
private ItemPath getPathToExplain(ItemDelta itemDelta) {
    ItemPath path = itemDelta.getPath();
    for (int i = 0; i < path.size(); i++) {
        ItemPathSegment segment = path.getSegments().get(i);
        if (segment instanceof IdItemPathSegment) {
            if (i < path.size()-1 || itemDelta.isDelete()) {
                return path.allUpToIncluding(i);
            } else {
                // this means that the path ends with [id] segment *and* the value(s) are
                // only added and deleted, i.e. they are shown in the delta anyway
                // (actually it is questionable whether path in delta can end with [id] segment,
                // but we test for this case just to be sure)
                return null;
            }
        }
    }
    return null;
}

/**
 * Filters the modifications of the given object delta down to those that should be shown in a
 * notification (definition present, not hidden, operational only when requested) and sorts
 * them by the definition's display order (deltas without a display order go last).
 */
private List<ItemDelta> filterAndOrderItemDeltas(ObjectDelta<? extends Objectable> objectDelta,
        List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
    List<ItemDelta> toBeDisplayed = new ArrayList<ItemDelta>(objectDelta.getModifications().size());
    List<QName> noDefinition = new ArrayList<>();
    for (ItemDelta itemDelta: objectDelta.getModifications()) {
        if (itemDelta.getDefinition() != null) {
            if ((showOperationalAttributes || !itemDelta.getDefinition().isOperational()) && !NotificationFuctionsImpl
                    .isAmongHiddenPaths(itemDelta.getPath(), hiddenPaths)) {
                toBeDisplayed.add(itemDelta);
            }
        } else {
            // deltas without a definition cannot be rendered; collect them for one summary log line
            noDefinition.add(itemDelta.getElementName());
        }
    }
    if (!noDefinition.isEmpty()) {
        LOGGER.error("ItemDeltas for {} without definition - WILL NOT BE INCLUDED IN NOTIFICATION. Containing object delta:\n{}",
                noDefinition, objectDelta.debugDump());
    }
    Collections.sort(toBeDisplayed, new Comparator<ItemDelta>() {
        @Override
        public int compare(ItemDelta delta1, ItemDelta delta2) {
            // null display orders sort after non-null ones
            Integer order1 = delta1.getDefinition().getDisplayOrder();
            Integer order2 = delta2.getDefinition().getDisplayOrder();
            if (order1 != null && order2 != null) {
                return order1 - order2;
            } else if (order1 == null && order2 == null) {
                return 0;
            } else if (order1 == null) {
                return 1;
            } else {
                return -1;
            }
        }
    });
    return toBeDisplayed;
}

// We call this on a filtered list of items - all of them have their definition set.
/**
 * Returns the label for an item: the resolved display name when the definition has one,
 * otherwise the item's element name.
 */
private String getItemLabel(Item item) {
    return item.getDefinition().getDisplayName() != null ?
            resolve(item.getDefinition().getDisplayName()) : item.getElementName().getLocalPart();
}

/**
 * Filters the given items down to those that should be shown in a notification (definition
 * present, not hidden, operational only when requested) and sorts them by the definition's
 * display order (items without a display order go last). Returns an empty list for null input.
 */
private List<Item> filterAndOrderItems(List<Item> items, List<ItemPath> hiddenPaths, boolean showOperationalAttributes) {
    if (items == null) {
        return new ArrayList<>();
    }
    List<Item> toBeDisplayed = new ArrayList<Item>(items.size());
    List<QName> noDefinition = new ArrayList<>();
    for (Item item : items) {
        if (item.getDefinition() != null) {
            boolean isHidden = NotificationFuctionsImpl.isAmongHiddenPaths(item.getPath(), hiddenPaths);
            if (!isHidden && (showOperationalAttributes || !item.getDefinition().isOperational())) {
                toBeDisplayed.add(item);
            }
        } else {
            // items without a definition cannot be rendered; collect them for one summary log line
            noDefinition.add(item.getElementName());
        }
    }
    if (!noDefinition.isEmpty()) {
        LOGGER.error("Items {} without definition - THEY WILL NOT BE INCLUDED IN NOTIFICATION.\nAll items:\n{}",
                noDefinition, DebugUtil.debugDump(items));
    }
    Collections.sort(toBeDisplayed, new Comparator<Item>() {
        @Override
        public int compare(Item item1, Item item2) {
            // null display orders sort after non-null ones
            Integer order1 = item1.getDefinition().getDisplayOrder();
            Integer order2 = item2.getDefinition().getDisplayOrder();
            if (order1 != null && order2 != null) {
                return order1 - order2;
            } else if (order1 == null && order2 == null) {
                return 0;
            } else if (order1 == null) {
                return 1;
            } else {
                return -1;
            }
        }
    });
    return toBeDisplayed;
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package storm.mesos.util;

import com.google.common.base.Function;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringUtils;
import org.apache.mesos.Protos.Offer;
import org.apache.mesos.Protos.OfferID;
import org.apache.mesos.Protos.Resource;
import org.apache.mesos.Protos.TaskInfo;
import org.apache.mesos.Protos.TaskStatus;
import org.apache.mesos.Protos.Value.Range;
import org.apache.mesos.Protos.Value.Ranges;
import org.apache.mesos.Protos.Value.Set;
import org.json.simple.JSONValue;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

/**
 * This utility class provides methods to improve logging of Mesos protobuf objects.
 * These methods don't perform any logging directly, instead they return Strings
 * which can be logged by callers.
 *
 * The methods offer more concise and readable String representations of protobuf
 * objects than you get by calling a protobuf object's native toString method.
 *
 * Another advantage over standard protobuf toString() output is that this output is
 * in proper JSON format, resulting in logs that can be more easily parsed.
 * This desire to print in JSON is why we populate Maps below for each multi-field
 * object. Also, we use LinkedHashMap and TreeMap to ensure the order of the fields
 * in multi-field objects stays consistent. That allows log lines to be visually
 * compared whilst examining the state of the system.
 *
 * TODO(erikdw):
 *  1. Check whether a value is set/null when deciding whether to include it.
 *  2. Currently we only include the object values that we care about for a
 *     storm-only mesos-cluster. We should instead allow configuration for choosing
 *     which fields to include (e.g., a bitmap toggling certain fields on/off).
 *  3. Allow cleanly logging to separate Logger, to allow configuring the logs going to
 *     a separate log file. The complication is that these methods lack context,
 *     they are just pretty-printing the protobuf objects.
 */
public class PrettyProtobuf {

  /**
   * Pretty-print mesos protobuf TaskStatus.
   * Includes the optional "message" field only when it is set.
   */
  public static String taskStatusToString(TaskStatus taskStatus) {
    Map<String, String> map = new LinkedHashMap<>();
    map.put("task_id", taskStatus.getTaskId().getValue());
    map.put("slave_id", taskStatus.getSlaveId().getValue());
    map.put("state", taskStatus.getState().toString());
    if (taskStatus.hasMessage()) {
      map.put("message", taskStatus.getMessage());
    }
    return JSONValue.toJSONString(map);
  }

  /**
   * Pretty-print mesos protobuf TaskInfo.
   * <p/>
   * XXX(erikdw): not including command, container (+data), nor health_check.
   */
  public static String taskInfoToString(TaskInfo task) {
    Map<String, String> map = new LinkedHashMap<>();
    map.put("task_id", task.getTaskId().getValue());
    map.put("slave_id", task.getSlaveId().getValue());
    map.putAll(resourcesToOrderedMap(task.getResourcesList()));
    map.put("executor_id", task.getExecutor().getExecutorId().getValue());
    return JSONValue.toJSONString(map);
  }

  /**
   * Pretty-print mesos protobuf Offer.
   * <p/>
   * XXX(erikdw): not including slave_id, attributes, executor_ids, nor framework_id.
   */
  public static String offerToString(Offer offer) {
    Map<String, String> map = new LinkedHashMap<>();
    map.put("offer_id", offer.getId().getValue());
    map.put("hostname", offer.getHostname());
    map.putAll(resourcesToOrderedMap(offer.getResourcesList()));
    return JSONValue.toJSONString(map);
  }

  /**
   * Pretty-print List of mesos protobuf Offers, one offer per line.
   */
  public static String offerListToString(List<Offer> offers) {
    List<String> offersAsStrings = Lists.transform(offers, offerToStringTransform);
    return String.format("[\n%s]", StringUtils.join(offersAsStrings, ",\n"));
  }

  /**
   * Pretty-print List of mesos protobuf TaskInfos.
   */
  public static String taskInfoListToString(List<TaskInfo> tasks) {
    List<String> tasksAsStrings = Lists.transform(tasks, taskInfoToStringTransform);
    return String.format("[%s]", StringUtils.join(tasksAsStrings, ", "));
  }

  /**
   * Pretty-print the values in the Offer map used in MesosNimbus.
   * <p/>
   * Callers must ensure they have locked the Map first, else they could
   * have inconsistent output since the _offers map is touched from both
   * mesos-driven events and storm-driven calls.
   * <p/>
   * TODO:(erikdw): figure out a better design that removes the need
   * for external callers to lock before calling this method.
   */
  public static String offerMapToString(Map<OfferID, Offer> offers) {
    List<String> offersAsStrings = Lists.transform(new ArrayList<Offer>(offers.values()), offerToStringTransform);
    return String.format("[\n%s]", StringUtils.join(offersAsStrings, ",\n"));
  }

  /**
   * Pretty-print the key set of the Offer map used in MesosNimbus.
   * <p/>
   * Callers must ensure they have locked the Map first, else they could
   * have inconsistent output since the _offers map is touched from both
   * mesos-driven events and storm-driven calls.
   * <p/>
   * TODO:(erikdw): implement same fix as required for offerMapToString.
   */
  public static String offerMapKeySetToString(Map<OfferID, Offer> offers) {
    ArrayList<String> offerIdList = new ArrayList<String>();
    for (OfferID offerId : offers.keySet()) {
      offerIdList.add(offerId.getValue());
    }
    return String.format("[%s]", StringUtils.join(offerIdList, ", "));
  }

  /**
   * Wrapper around offerToString which allows using guava's transform utility.
   */
  private static final Function<Offer, String> offerToStringTransform =
      new Function<Offer, String>() {
        public String apply(Offer o) {
          return offerToString(o);
        }
      };

  /**
   * Wrapper around taskInfoToString which allows using guava's transform utility.
   */
  private static final Function<TaskInfo, String> taskInfoToStringTransform =
      new Function<TaskInfo, String>() {
        public String apply(TaskInfo t) {
          return taskInfoToString(t);
        }
      };

  /**
   * Wrapper around rangeToString which allows using guava's transform utility.
   */
  private static final Function<Range, String> rangeToStringTransform =
      new Function<Range, String>() {
        public String apply(Range r) {
          return rangeToString(r);
        }
      };

  /**
   * Create String representation of mesos protobuf Range type.
   */
  private static String rangeToString(Range range) {
    String beginStr = String.valueOf(range.getBegin());
    String endStr = String.valueOf(range.getEnd());
    /*
     * A Range representing a single number still has both Range.begin
     * and Range.end populated, but they are set to the same value.
     * In that case we just return "N" instead of "N-N".
     */
    if (range.getBegin() == range.getEnd()) {
      return beginStr;
    } else {
      return String.format("%s-%s", beginStr, endStr);
    }
  }

  /**
   * Pretty-print mesos protobuf Ranges.
   */
  private static String rangesToString(Ranges ranges) {
    List<String> rangesAsStrings = Lists.transform(ranges.getRangeList(), rangeToStringTransform);
    return String.format("[%s]", StringUtils.join(rangesAsStrings, ","));
  }

  /**
   * Pretty-print mesos protobuf Set.
   */
  private static String setToString(Set set) {
    return String.format("[%s]", StringUtils.join(set.getItemList(), ","));
  }

  /**
   * Return Resource names mapped to values.
   * Resources with a role are keyed as "name(role)"; the value is rendered
   * according to the Resource's type (SCALAR, RANGES, or SET).
   */
  private static Map<String, String> resourcesToOrderedMap(List<Resource> resources) {
    Map<String, String> map = new TreeMap<>();
    for (Resource r : resources) {
      String name;
      String value;
      if (r.hasRole()) {
        name = String.format("%s(%s)", r.getName(), r.getRole());
      } else {
        name = r.getName();
      }
      switch (r.getType()) {
        case SCALAR:
          value = String.valueOf(r.getScalar().getValue());
          break;
        case RANGES:
          value = rangesToString(r.getRanges());
          break;
        case SET:
          value = setToString(r.getSet());
          break;
        default:
          // If hit, then a new Resource Type needs to be handled here.
          value = String.format("Unrecognized Resource Type: `%s'", r.getType());
          break;
      }
      map.put(name, value);
    }
    return map;
  }

  /**
   * Wrapper around OfferID.getValue which allows using guava's transform utility.
   */
  private static final Function<OfferID, String> offerIDToStringTransform =
      new Function<OfferID, String>() {
        public String apply(OfferID o) {
          // getValue() already returns a String; no extra conversion needed
          return o.getValue();
        }
      };

  /**
   * Pretty-print List of mesos protobuf OfferIDs.
   */
  public static String offerIDListToString(List<OfferID> offerIDList) {
    List<String> offerIDsAsStrings = Lists.transform(offerIDList, offerIDToStringTransform);
    return String.format("[%s]", StringUtils.join(offerIDsAsStrings, ", "));
  }

  /**
   * Wrapper around TaskStatus -> quoted task-id String which allows using guava's transform utility.
   */
  private static final Function<TaskStatus, String> taskStatusToTaskIDStringTransform =
      new Function<TaskStatus, String>() {
        public String apply(TaskStatus t) {
          return String.format("\"%s\"", t.getTaskId().getValue());
        }
      };

  /**
   * Pretty-print the task IDs of a List of mesos protobuf TaskStatuses.
   */
  public static String taskStatusListToTaskIDsString(List<TaskStatus> taskStatusList) {
    List<String> taskIDsAsStrings = Lists.transform(taskStatusList, taskStatusToTaskIDStringTransform);
    return String.format("[%s]", StringUtils.join(taskIDsAsStrings, ", "));
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.rest.action.search; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.QuerySourceBuilder; import org.elasticsearch.client.Client; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.exists.RestExistsAction; import org.elasticsearch.rest.action.support.RestActions; import org.elasticsearch.rest.action.support.RestStatusToXContentListener; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.source.FetchSourceContext; import org.elasticsearch.search.internal.SearchContext; import 
org.elasticsearch.search.sort.SortOrder; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.search.suggest.SuggestBuilders.termSuggestion; /** * */ public class RestSearchAction extends BaseRestHandler { @Inject public RestSearchAction(Settings settings, RestController controller, Client client) { super(settings, controller, client); controller.registerHandler(GET, "/_search", this); controller.registerHandler(POST, "/_search", this); controller.registerHandler(GET, "/{index}/_search", this); controller.registerHandler(POST, "/{index}/_search", this); controller.registerHandler(GET, "/{index}/{type}/_search", this); controller.registerHandler(POST, "/{index}/{type}/_search", this); controller.registerHandler(GET, "/_search/template", this); controller.registerHandler(POST, "/_search/template", this); controller.registerHandler(GET, "/{index}/_search/template", this); controller.registerHandler(POST, "/{index}/_search/template", this); controller.registerHandler(GET, "/{index}/{type}/_search/template", this); controller.registerHandler(POST, "/{index}/{type}/_search/template", this); RestExistsAction restExistsAction = new RestExistsAction(settings, controller, client); controller.registerHandler(GET, "/_search/exists", restExistsAction); controller.registerHandler(POST, "/_search/exists", restExistsAction); controller.registerHandler(GET, "/{index}/_search/exists", restExistsAction); controller.registerHandler(POST, "/{index}/_search/exists", restExistsAction); controller.registerHandler(GET, "/{index}/{type}/_search/exists", restExistsAction); controller.registerHandler(POST, "/{index}/{type}/_search/exists", restExistsAction); } @Override public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) { SearchRequest searchRequest = new SearchRequest(); 
RestSearchAction.parseSearchRequest(searchRequest, request, parseFieldMatcher, null); client.search(searchRequest, new RestStatusToXContentListener<SearchResponse>(channel)); } public static void parseSearchRequest(SearchRequest searchRequest, RestRequest request, ParseFieldMatcher parseFieldMatcher, BytesReference bodyContent) { searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); // get the content, and put it in the body // add content/source as template if template flag is set boolean isTemplateRequest = request.path().endsWith("/template"); if (bodyContent == null) { if (RestActions.hasBodyContent(request)) { bodyContent = RestActions.getRestContent(request); } } if (bodyContent != null) { if (isTemplateRequest) { searchRequest.templateSource(bodyContent); } else { searchRequest.source(bodyContent); } } // do not allow 'query_and_fetch' or 'dfs_query_and_fetch' search types // from the REST layer. these modes are an internal optimization and should // not be specified explicitly by the user. 
String searchType = request.param("search_type"); if (SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.QUERY_AND_FETCH) || SearchType.fromString(searchType, parseFieldMatcher).equals(SearchType.DFS_QUERY_AND_FETCH)) { throw new IllegalArgumentException("Unsupported search type [" + searchType + "]"); } else { searchRequest.searchType(searchType); } searchRequest.extraSource(parseSearchSource(request)); searchRequest.requestCache(request.paramAsBoolean("request_cache", null)); searchRequest.tokenRangesBitsetCache(request.paramAsBoolean("token_ranges_bitset_cache", null)); String scroll = request.param("scroll"); if (scroll != null) { searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll"))); } searchRequest.types(Strings.splitStringByCommaToArray(request.param("type"))); searchRequest.routing(request.param("routing")); searchRequest.preference(request.param("preference")); searchRequest.tokenRanges(request.paramsAsTokenRanges("token_ranges")); searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions())); } public static SearchSourceBuilder parseSearchSource(RestRequest request) { SearchSourceBuilder searchSourceBuilder = null; QuerySourceBuilder querySourceBuilder = RestActions.parseQuerySource(request); if (querySourceBuilder != null) { searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.query(querySourceBuilder); } int from = request.paramAsInt("from", -1); if (from != -1) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.from(from); } int size = request.paramAsInt("size", -1); if (size != -1) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.size(size); } if (request.hasParam("explain")) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.explain(request.paramAsBoolean("explain", null)); } if 
(request.hasParam("version")) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.version(request.paramAsBoolean("version", null)); } if (request.hasParam("timeout")) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.timeout(request.paramAsTime("timeout", null)); } if (request.hasParam("terminate_after")) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } int terminateAfter = request.paramAsInt("terminate_after", SearchContext.DEFAULT_TERMINATE_AFTER); if (terminateAfter < 0) { throw new IllegalArgumentException("terminateAfter must be > 0"); } else if (terminateAfter > 0) { searchSourceBuilder.terminateAfter(terminateAfter); } } String sField = request.param("fields"); if (sField != null) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } if (!Strings.hasText(sField)) { searchSourceBuilder.noFields(); } else { String[] sFields = Strings.splitStringByCommaToArray(sField); if (sFields != null) { for (String field : sFields) { searchSourceBuilder.field(field); } } } } String sFieldDataFields = request.param("fielddata_fields"); if (sFieldDataFields != null) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } if (Strings.hasText(sFieldDataFields)) { String[] sFields = Strings.splitStringByCommaToArray(sFieldDataFields); if (sFields != null) { for (String field : sFields) { searchSourceBuilder.fieldDataField(field); } } } } FetchSourceContext fetchSourceContext = FetchSourceContext.parseFromRestRequest(request); if (fetchSourceContext != null) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.fetchSource(fetchSourceContext); } if (request.hasParam("track_scores")) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } 
searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false)); } String sSorts = request.param("sort"); if (sSorts != null) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } String[] sorts = Strings.splitStringByCommaToArray(sSorts); for (String sort : sorts) { int delimiter = sort.lastIndexOf(":"); if (delimiter != -1) { String sortField = sort.substring(0, delimiter); String reverse = sort.substring(delimiter + 1); if ("asc".equals(reverse)) { searchSourceBuilder.sort(sortField, SortOrder.ASC); } else if ("desc".equals(reverse)) { searchSourceBuilder.sort(sortField, SortOrder.DESC); } } else { searchSourceBuilder.sort(sort); } } } String sStats = request.param("stats"); if (sStats != null) { if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } searchSourceBuilder.stats(Strings.splitStringByCommaToArray(sStats)); } String suggestField = request.param("suggest_field"); if (suggestField != null) { String suggestText = request.param("suggest_text", request.param("q")); int suggestSize = request.paramAsInt("suggest_size", 5); if (searchSourceBuilder == null) { searchSourceBuilder = new SearchSourceBuilder(); } String suggestMode = request.param("suggest_mode"); searchSourceBuilder.suggest().addSuggestion( termSuggestion(suggestField).field(suggestField).text(suggestText).size(suggestSize) .suggestMode(suggestMode) ); } return searchSourceBuilder; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.cql3;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.exceptions.SyntaxException;
import org.apache.cassandra.schema.KeyspaceMetadata;
import org.apache.cassandra.schema.Types;
import org.apache.cassandra.serializers.CollectionSerializer;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.utils.ByteBufferUtil;

public interface CQL3Type
{
    static final Logger logger = LoggerFactory.getLogger(CQL3Type.class);

    public boolean isCollection();
    public AbstractType<?> getType();

    /**
     * Generate CQL literal from this type's serialized representation using the specified protocol version.
     * Convenience method for {@link #toCQLLiteral(ByteBuffer, int, StringBuilder)} that just returns a {@code String}.
     */
    public default String asCQLLiteral(ByteBuffer buffer, int version)
    {
        StringBuilder sb = new StringBuilder();
        toCQLLiteral(buffer, version, sb);
        return sb.toString();
    }

    /**
     * Generate CQL literal from this type's serialized representation using the specified protocol version.
     * Some work is delegated to {@link org.apache.cassandra.serializers.TypeSerializer#toCQLLiteral(ByteBuffer, StringBuilder)}.
     */
    public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target);

    // Each native CQL type wraps the AbstractType that implements it.
    public enum Native implements CQL3Type
    {
        ASCII       (AsciiType.instance),
        BIGINT      (LongType.instance),
        BLOB        (BytesType.instance),
        BOOLEAN     (BooleanType.instance),
        COUNTER     (CounterColumnType.instance),
        DATE        (SimpleDateType.instance),
        DECIMAL     (DecimalType.instance),
        DOUBLE      (DoubleType.instance),
        EMPTY       (EmptyType.instance),
        FLOAT       (FloatType.instance),
        INET        (InetAddressType.instance),
        INT         (Int32Type.instance),
        SMALLINT    (ShortType.instance),
        TEXT        (UTF8Type.instance),
        TIME        (TimeType.instance),
        TIMESTAMP   (TimestampType.instance),
        TIMEUUID    (TimeUUIDType.instance),
        TINYINT     (ByteType.instance),
        UUID        (UUIDType.instance),
        VARCHAR     (UTF8Type.instance),
        VARINT      (IntegerType.instance);

        private final AbstractType<?> type;

        private Native(AbstractType<?> type)
        {
            this.type = type;
        }

        public boolean isCollection()
        {
            return false;
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        /**
         * Delegate to
         * {@link org.apache.cassandra.serializers.TypeSerializer#toCQLLiteral(ByteBuffer, StringBuilder)}
         * for native types as most CQL literal representations work fine with the default
         * {@link org.apache.cassandra.serializers.TypeSerializer#toString(Object)}
         * {@link org.apache.cassandra.serializers.TypeSerializer#deserialize(ByteBuffer)} implementations.
         */
        public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target)
        {
            type.getSerializer().toCQLLiteral(buffer, target);
        }

        @Override
        public String toString()
        {
            // CQL type names are rendered lowercase ("int", "text", ...)
            return super.toString().toLowerCase();
        }
    }

    // Wraps a user-provided AbstractType (declared with its fully-qualified class name in CQL).
    public static class Custom implements CQL3Type
    {
        private final AbstractType<?> type;

        public Custom(AbstractType<?> type)
        {
            this.type = type;
        }

        public Custom(String className) throws SyntaxException, ConfigurationException
        {
            this(TypeParser.parse(className));
        }

        public boolean isCollection()
        {
            return false;
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target)
        {
            if (buffer == null)
                target.append("null");
            else
                target.append(type.getString(buffer));
        }

        @Override
        public final boolean equals(Object o)
        {
            if(!(o instanceof Custom))
                return false;

            Custom that = (Custom)o;
            return type.equals(that.type);
        }

        @Override
        public final int hashCode()
        {
            return type.hashCode();
        }

        @Override
        public String toString()
        {
            return "'" + type + '\'';
        }
    }

    public static class Collection implements CQL3Type
    {
        private final CollectionType type;

        public Collection(CollectionType type)
        {
            this.type = type;
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        public boolean isCollection()
        {
            return true;
        }

        public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target)
        {
            // Not sure whether the !buffer.hasRemaining() check is correct here or whether an empty
            // BB should be returned as "[]" resp "{}" or whether it is not valid at all.
            //
            // Currently, all empty collections return '[]' or '{}'. Except frozen collections with
            // a null BB return 'null'.
            //
            if (buffer == null || !buffer.hasRemaining())
            {
                if (buffer == null && type.isFrozenCollection())
                {
                    target.append("null");
                }
                else
                {
                    switch (type.kind)
                    {
                        case LIST:
                            target.append("[]");
                            break;
                        case SET:
                        case MAP:
                            target.append("{}");
                            break;
                    }
                }
            }
            else
            {
                // non-empty payload: read the element count, then render per collection kind
                int size = CollectionSerializer.readCollectionSize(buffer, version);
                switch (type.kind)
                {
                    case LIST:
                        CQL3Type elements = ((ListType) type).getElementsType().asCQL3Type();
                        target.append('[');
                        generateSetOrListCQLLiteral(buffer, version, target, size, elements);
                        target.append(']');
                        break;
                    case SET:
                        elements = ((SetType) type).getElementsType().asCQL3Type();
                        target.append('{');
                        generateSetOrListCQLLiteral(buffer, version, target, size, elements);
                        target.append('}');
                        break;
                    case MAP:
                        target.append('{');
                        generateMapCQLLiteral(buffer, version, target, size);
                        target.append('}');
                        break;
                }
            }
        }

        // Renders "key: value" pairs; consumes key and value buffers alternately.
        private void generateMapCQLLiteral(ByteBuffer buffer, int version, StringBuilder target, int size)
        {
            CQL3Type keys = ((MapType) type).getKeysType().asCQL3Type();
            CQL3Type values = ((MapType) type).getValuesType().asCQL3Type();
            for (int i = 0; i < size; i++)
            {
                if (i > 0)
                    target.append(", ");
                ByteBuffer element = CollectionSerializer.readValue(buffer, version);
                keys.toCQLLiteral(element, version, target);
                target.append(": ");
                element = CollectionSerializer.readValue(buffer, version);
                values.toCQLLiteral(element, version, target);
            }
        }

        // Renders comma-separated elements for both sets and lists.
        private static void generateSetOrListCQLLiteral(ByteBuffer buffer, int version, StringBuilder target, int size, CQL3Type elements)
        {
            for (int i = 0; i < size; i++)
            {
                if (i > 0)
                    target.append(", ");
                ByteBuffer element = CollectionSerializer.readValue(buffer, version);
                elements.toCQLLiteral(element, version, target);
            }
        }

        @Override
        public final boolean equals(Object o)
        {
            if(!(o instanceof Collection))
                return false;

            Collection that = (Collection)o;
            return type.equals(that.type);
        }

        @Override
        public final int hashCode()
        {
            return type.hashCode();
        }

        @Override
        public String toString()
        {
            boolean isFrozen = !this.type.isMultiCell();
            StringBuilder sb = new StringBuilder(isFrozen ? "frozen<" : "");
            switch (type.kind)
            {
                case LIST:
                    AbstractType<?> listType = ((ListType)type).getElementsType();
                    sb.append("list<").append(listType.asCQL3Type());
                    break;
                case SET:
                    AbstractType<?> setType = ((SetType)type).getElementsType();
                    sb.append("set<").append(setType.asCQL3Type());
                    break;
                case MAP:
                    AbstractType<?> keysType = ((MapType)type).getKeysType();
                    AbstractType<?> valuesType = ((MapType)type).getValuesType();
                    sb.append("map<").append(keysType.asCQL3Type()).append(", ").append(valuesType.asCQL3Type());
                    break;
                default:
                    throw new AssertionError();
            }
            sb.append('>');
            if (isFrozen)
                sb.append('>');
            return sb.toString();
        }
    }

    public static class UserDefined implements CQL3Type
    {
        // Keeping this separately from type just to simplify toString()
        private final String name;
        private final UserType type;

        private UserDefined(String name, UserType type)
        {
            this.name = name;
            this.type = type;
        }

        public static UserDefined create(UserType type)
        {
            return new UserDefined(UTF8Type.instance.compose(type.name), type);
        }

        public boolean isCollection()
        {
            return false;
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target)
        {
            if (buffer == null)
            {
                target.append("null");
            }
            else
            {
                // fields are serialized as [4-byte length][payload]; length < 0 encodes null
                target.append('{');
                for (int i = 0; i < type.size(); i++)
                {
                    // we allow the input to have less fields than declared so as to support field addition.
                    if (!buffer.hasRemaining())
                        break;

                    if (buffer.remaining() < 4)
                        throw new MarshalException(String.format("Not enough bytes to read size of %dth field %s", i, type.fieldName(i)));

                    int size = buffer.getInt();

                    if (i > 0)
                        target.append(", ");

                    target.append(ColumnIdentifier.maybeQuote(type.fieldNameAsString(i)));
                    target.append(": ");

                    // size < 0 means null value
                    if (size < 0)
                    {
                        target.append("null");
                        continue;
                    }

                    if (buffer.remaining() < size)
                        throw new MarshalException(String.format("Not enough bytes to read %dth field %s", i, type.fieldName(i)));

                    ByteBuffer field = ByteBufferUtil.readBytes(buffer, size);
                    type.fieldType(i).asCQL3Type().toCQLLiteral(field, version, target);
                }
                target.append('}');
            }
        }

        @Override
        public final boolean equals(Object o)
        {
            if(!(o instanceof UserDefined))
                return false;

            UserDefined that = (UserDefined)o;
            return type.equals(that.type);
        }

        @Override
        public final int hashCode()
        {
            return type.hashCode();
        }

        @Override
        public String toString()
        {
            return "frozen<" + ColumnIdentifier.maybeQuote(name) + '>';
        }
    }

    public static class Tuple implements CQL3Type
    {
        private final TupleType type;

        private Tuple(TupleType type)
        {
            this.type = type;
        }

        public static Tuple create(TupleType type)
        {
            return new Tuple(type);
        }

        public boolean isCollection()
        {
            return false;
        }

        public AbstractType<?> getType()
        {
            return type;
        }

        public void toCQLLiteral(ByteBuffer buffer, int version, StringBuilder target)
        {
            if (buffer == null)
            {
                target.append("null");
            }
            else
            {
                // components are serialized like UDT fields: [4-byte length][payload], length < 0 = null
                target.append('(');
                boolean first = true;
                for (int i = 0; i < type.size(); i++)
                {
                    // we allow the input to have less fields than declared so as to support field addition.
                    if (!buffer.hasRemaining())
                        break;

                    if (buffer.remaining() < 4)
                        throw new MarshalException(String.format("Not enough bytes to read size of %dth component", i));

                    int size = buffer.getInt();

                    if (first)
                        first = false;
                    else
                        target.append(", ");

                    // size < 0 means null value
                    if (size < 0)
                    {
                        target.append("null");
                        continue;
                    }

                    if (buffer.remaining() < size)
                        throw new MarshalException(String.format("Not enough bytes to read %dth component", i));

                    ByteBuffer field = ByteBufferUtil.readBytes(buffer, size);
                    type.type(i).asCQL3Type().toCQLLiteral(field, version, target);
                }
                target.append(')');
            }
        }

        @Override
        public final boolean equals(Object o)
        {
            if(!(o instanceof Tuple))
                return false;

            Tuple that = (Tuple)o;
            return type.equals(that.type);
        }

        @Override
        public final int hashCode()
        {
            return type.hashCode();
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder();
            sb.append("frozen<tuple<");
            for (int i = 0; i < type.size(); i++)
            {
                if (i > 0)
                    sb.append(", ");
                sb.append(type.type(i).asCQL3Type());
            }
            sb.append(">>");
            return sb.toString();
        }
    }

    // For UserTypes, we need to know the current keyspace to resolve the
    // actual type used, so Raw is a "not yet prepared" CQL3Type.
/**
 * A "not yet prepared" CQL3 type as it appears in a parsed statement: the
 * concrete type (in particular for user-defined types) can only be resolved
 * once the current keyspace is known, via {@link #prepare(String, Types)}.
 */
public abstract class Raw
{
    // whether frozen<...> was applied to this type (or implied, e.g. for tuples)
    protected boolean frozen = false;

    // whether frozen<...> is a legal wrapper for this kind of type
    protected abstract boolean supportsFreezing();

    public boolean isCollection() { return false; }

    public boolean isFrozen() { return this.frozen; }

    public boolean canBeNonFrozen() { return true; }

    public boolean isCounter() { return false; }

    // keyspace qualifying this type, if any (only user types carry one)
    public String keyspace() { return null; }

    /**
     * Marks this type frozen. The base implementation rejects freezing,
     * since frozen<> only applies to collections, tuples and UDTs;
     * subclasses that support freezing override this.
     */
    public void freeze() throws InvalidRequestException
    {
        String message = String.format("frozen<> is only allowed on collections, tuples, and user-defined types (got %s)", this);
        throw new InvalidRequestException(message);
    }

    /**
     * Resolves this raw type against the given keyspace, looking up its
     * user-defined types from the schema.
     */
    public CQL3Type prepare(String keyspace)
    {
        KeyspaceMetadata ksm = Schema.instance.getKSMetaData(keyspace);
        if (ksm == null)
            throw new ConfigurationException(String.format("Keyspace %s doesn't exist", keyspace));
        return prepare(keyspace, ksm.types);
    }

    public abstract CQL3Type prepare(String keyspace, Types udts) throws InvalidRequestException;

    // internal (schema-side) preparation; by default identical to prepare()
    public CQL3Type prepareInternal(String keyspace, Types udts) throws InvalidRequestException
    {
        return prepare(keyspace, udts);
    }

    public boolean referencesUserType(String name)
    {
        return false;
    }

    // Factory methods for the different raw type shapes.
    public static Raw from(CQL3Type type)
    {
        return new RawType(type);
    }

    public static Raw userType(UTName name)
    {
        return new RawUT(name);
    }

    public static Raw map(CQL3Type.Raw t1, CQL3Type.Raw t2)
    {
        return new RawCollection(CollectionType.Kind.MAP, t1, t2);
    }

    public static Raw list(CQL3Type.Raw t)
    {
        return new RawCollection(CollectionType.Kind.LIST, null, t);
    }

    public static Raw set(CQL3Type.Raw t)
    {
        return new RawCollection(CollectionType.Kind.SET, null, t);
    }

    public static Raw tuple(List<CQL3Type.Raw> ts)
    {
        return new RawTuple(ts);
    }

    // frozen<t>: freezes t in place and returns it
    public static Raw frozen(CQL3Type.Raw t) throws InvalidRequestException
    {
        t.freeze();
        return t;
    }

    /** A raw type wrapping an already-resolved CQL3Type (native types etc.). */
    private static class RawType extends Raw
    {
        private final CQL3Type type;

        private RawType(CQL3Type type)
        {
            this.type = type;
        }

        // nothing to resolve: the type is already concrete
        public CQL3Type prepare(String keyspace, Types udts) throws InvalidRequestException
        {
            return type;
        }

        protected boolean supportsFreezing()
        {
            return false;
        }

        public boolean isCounter()
        {
            return type == Native.COUNTER;
        }

        @Override
        public String toString()
        {
            return type.toString();
        }
    }

    /** A raw list, set or map; 'keys' is null for lists and sets. */
    private static class RawCollection extends Raw
    {
        private final CollectionType.Kind kind;
        private final CQL3Type.Raw keys;
        private final CQL3Type.Raw values;

        private RawCollection(CollectionType.Kind kind, CQL3Type.Raw keys, CQL3Type.Raw values)
        {
            this.kind = kind;
            this.keys = keys;
            this.values = values;
        }

        // freezing a collection also freezes its freezable element types
        public void freeze() throws InvalidRequestException
        {
            if (keys != null && keys.supportsFreezing())
                keys.freeze();
            if (values != null && values.supportsFreezing())
                values.freeze();
            frozen = true;
        }

        protected boolean supportsFreezing()
        {
            return true;
        }

        public boolean isCollection()
        {
            return true;
        }

        public CQL3Type prepare(String keyspace, Types udts) throws InvalidRequestException
        {
            return prepare(keyspace, udts, false);
        }

        // schema-side preparation relaxes the counter restriction (see below)
        public CQL3Type prepareInternal(String keyspace, Types udts)
        {
            return prepare(keyspace, udts, true);
        }

        /**
         * Validates nesting/counter/freezing rules and builds the concrete
         * collection type; the '!frozen' flag becomes the type's multi-cell-ness.
         */
        public CQL3Type prepare(String keyspace, Types udts, boolean isInternal) throws InvalidRequestException
        {
            assert values != null : "Got null values type for a collection";

            if (!frozen && values.supportsFreezing() && !values.frozen)
                throw new InvalidRequestException("Non-frozen collections are not allowed inside collections: " + this);

            // we represent Thrift supercolumns as maps, internally, and we do allow counters in supercolumns. Thus,
            // for internal type parsing (think schema) we have to make an exception and allow counters as (map) values
            if (values.isCounter() && !isInternal)
                throw new InvalidRequestException("Counters are not allowed inside collections: " + this);

            if (keys != null)
            {
                if (keys.isCounter())
                    throw new InvalidRequestException("Counters are not allowed inside collections: " + this);
                if (!frozen && keys.supportsFreezing() && !keys.frozen)
                    throw new InvalidRequestException("Non-frozen collections are not allowed inside collections: " + this);
            }

            switch (kind)
            {
                case LIST:
                    return new Collection(ListType.getInstance(values.prepare(keyspace, udts).getType(), !frozen));
                case SET:
                    return new Collection(SetType.getInstance(values.prepare(keyspace, udts).getType(), !frozen));
                case MAP:
                    assert keys != null : "Got null keys type for a collection";
                    return new Collection(MapType.getInstance(keys.prepare(keyspace, udts).getType(), values.prepare(keyspace, udts).getType(), !frozen));
            }
            throw new AssertionError();
        }

        public boolean referencesUserType(String name)
        {
            return (keys != null && keys.referencesUserType(name)) || values.referencesUserType(name);
        }

        @Override
        public String toString()
        {
            String start = frozen? "frozen<" : "";
            String end = frozen ? ">" : "";
            switch (kind)
            {
                case LIST: return start + "list<" + values + '>' + end;
                case SET: return start + "set<" + values + '>' + end;
                case MAP: return start + "map<" + keys + ", " + values + '>' + end;
            }
            throw new AssertionError();
        }
    }

    /** A raw user-defined type, identified by name; resolved against the schema. */
    private static class RawUT extends Raw
    {
        private final UTName name;

        private RawUT(UTName name)
        {
            this.name = name;
        }

        public String keyspace()
        {
            return name.getKeyspace();
        }

        public void freeze()
        {
            frozen = true;
        }

        // user types must always be frozen (enforced in prepare())
        public boolean canBeNonFrozen()
        {
            return false;
        }

        public CQL3Type prepare(String keyspace, Types udts) throws InvalidRequestException
        {
            if (name.hasKeyspace())
            {
                // The provided keyspace is the one of the current statement this is part of. If it's different from the keyspace of
                // the UTName, we reject since we want to limit user types to their own keyspace (see #6643)
                if (!keyspace.equals(name.getKeyspace()))
                    throw new InvalidRequestException(String.format("Statement on keyspace %s cannot refer to a user type in keyspace %s; " + "user types can only be used in the keyspace they are defined in", keyspace, name.getKeyspace()));
            }
            else
            {
                name.setKeyspace(keyspace);
            }

            UserType type = udts.getNullable(name.getUserTypeName());
            if (type == null)
                throw new InvalidRequestException("Unknown type " + name);

            if (!frozen)
                throw new InvalidRequestException("Non-frozen User-Defined types are not supported, please use frozen<>");

            return new UserDefined(name.toString(), type);
        }

        public boolean referencesUserType(String name)
        {
            return this.name.getStringTypeName().equals(name);
        }

        protected boolean supportsFreezing()
        {
            return true;
        }

        @Override
        public String toString()
        {
            return name.toString();
        }
    }

    /** A raw tuple; tuples are implicitly frozen (freeze() is forced in prepare()). */
    private static class RawTuple extends Raw
    {
        private final List<CQL3Type.Raw> types;

        private RawTuple(List<CQL3Type.Raw> types)
        {
            this.types = types;
        }

        protected boolean supportsFreezing()
        {
            return true;
        }

        public boolean isCollection()
        {
            return false;
        }

        // freezing a tuple freezes all freezable component types as well
        public void freeze() throws InvalidRequestException
        {
            for (CQL3Type.Raw t : types)
                if (t.supportsFreezing())
                    t.freeze();
            frozen = true;
        }

        public CQL3Type prepare(String keyspace, Types udts) throws InvalidRequestException
        {
            // tuples are implicitly frozen
            if (!frozen)
                freeze();

            List<AbstractType<?>> ts = new ArrayList<>(types.size());
            for (CQL3Type.Raw t : types)
            {
                if (t.isCounter())
                    throw new InvalidRequestException("Counters are not allowed inside tuples");

                ts.add(t.prepare(keyspace, udts).getType());
            }
            return new Tuple(new TupleType(ts));
        }

        public boolean referencesUserType(String name)
        {
            return types.stream().anyMatch(t -> t.referencesUserType(name));
        }

        @Override
        public String toString()
        {
            StringBuilder sb = new StringBuilder();
            sb.append("tuple<");
            for (int i = 0; i < types.size(); i++)
            {
                if (i > 0)
                    sb.append(", ");
                sb.append(types.get(i));
            }
            sb.append('>');
            return sb.toString();
        }
    }
}
}
package com.alexvasilkov.android.commons.prefs; import android.content.SharedPreferences; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.util.Base64; import com.alexvasilkov.android.commons.utils.GsonHelper; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.lang.reflect.Type; import java.util.Date; /** * Helper methods to store additional types of values to {@link SharedPreferences}. */ @SuppressWarnings({ "WeakerAccess", "unused" }) // Public API public class PreferencesHelper { public static final String DEFAULT_DELIMITER = ","; /** * Converts and stores double value as long. */ @NonNull public static SharedPreferences.Editor putDouble(@NonNull SharedPreferences.Editor editor, @NonNull String key, double value) { editor.putLong(key, Double.doubleToLongBits(value)); return editor; } /** * Retrieves double value stored as long. */ public static double getDouble(@NonNull SharedPreferences prefs, @NonNull String key, double defaultValue) { long bits = prefs.getLong(key, Double.doubleToLongBits(defaultValue)); return Double.longBitsToDouble(bits); } /** * Stores given date as long value. {@link Long#MIN_VALUE} is used if date is <code>null</code>. */ @NonNull public static SharedPreferences.Editor putDate(@NonNull SharedPreferences.Editor editor, @NonNull String key, @Nullable Date value) { editor.putLong(key, value == null ? Long.MIN_VALUE : value.getTime()); return editor; } /** * Retrieves date value stored as long. */ @Nullable public static Date getDate(@NonNull SharedPreferences prefs, @NonNull String key) { long time = prefs.getLong(key, Long.MIN_VALUE); return time == Long.MIN_VALUE ? null : new Date(time); } /** * Stores strings array as single string. * * @param delimiter Delimiter used for strings concatination. 
*/ @NonNull public static SharedPreferences.Editor putStringArray(@NonNull SharedPreferences.Editor editor, @NonNull String key, @Nullable String[] values, @NonNull String delimiter) { editor.putString(key, concat(values, delimiter)); return editor; } /** * Stores strings array as single string. Uses {@link #DEFAULT_DELIMITER} as delimiter. */ @NonNull public static SharedPreferences.Editor putStringArray(@NonNull SharedPreferences.Editor editor, @NonNull String key, @Nullable String[] values) { return putStringArray(editor, key, values, DEFAULT_DELIMITER); } /** * Retrieves strings array stored as single string. * * @param delimiter Delimiter used to split the string. */ @Nullable public static String[] getStringArray(@NonNull SharedPreferences prefs, @NonNull String key, @NonNull String delimiter) { return split(prefs.getString(key, null), delimiter); } /** * Retrieves strings array stored as single string. * Uses {@link #DEFAULT_DELIMITER} as delimiter. */ @Nullable public static String[] getStringArray(@NonNull SharedPreferences prefs, @NonNull String key) { return getStringArray(prefs, key, DEFAULT_DELIMITER); } /** * Stores serializable object as BASE_64 encoded string. */ @NonNull public static SharedPreferences.Editor putSerializable(@NonNull SharedPreferences.Editor editor, @NonNull String key, @Nullable Serializable obj) { editor.putString(key, serialize(obj)); return editor; } /** * Retrieves serializable object stored as BASE_64 encoded string. */ @Nullable public static Serializable getSerializable(@NonNull SharedPreferences prefs, @NonNull String key) { return deserialize(prefs.getString(key, null)); } /** * Stores object as json encoded string. * Gson library should be available in classpath. 
*/ @NonNull public static SharedPreferences.Editor putJson(@NonNull SharedPreferences.Editor editor, @NonNull String key, @Nullable Object obj) { editor.putString(key, GsonHelper.toJson(obj)); return editor; } /** * Retrieves object stored as json encoded string. * Gson library should be available in classpath. */ @Nullable public static <T> T getJson(@NonNull SharedPreferences prefs, @NonNull String key, @NonNull Class<T> clazz) { return getJson(prefs, key, (Type) clazz); } /** * Retrieves object stored as json encoded string. * Gson library should be available in classpath. */ @Nullable public static <T> T getJson(@NonNull SharedPreferences prefs, @NonNull String key, @NonNull Type type) { return GsonHelper.fromJson(prefs.getString(key, null), type); } /* Helper methods */ @Nullable private static String concat(@Nullable String[] values, @NonNull String delimeter) { if (values == null || values.length == 0) { return null; } final StringBuilder str = new StringBuilder(); for (String val : values) { str.append(val).append(delimeter); } str.delete(str.length() - delimeter.length(), str.length()); return str.toString(); } @Nullable private static String[] split(@Nullable String value, @NonNull String delimeter) { return value == null ? 
null : value.split(delimeter); } @Nullable private static String serialize(@Nullable Serializable obj) { if (obj == null) { return null; } try { ByteArrayOutputStream byteOut = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream(byteOut); out.writeObject(obj); out.close(); return Base64.encodeToString(byteOut.toByteArray(), Base64.DEFAULT); } catch (IOException e) { e.printStackTrace(); return null; } } @Nullable private static Serializable deserialize(@Nullable String serialized) { if (serialized == null) { return null; } try { ByteArrayInputStream byteIn = new ByteArrayInputStream( Base64.decode(serialized, Base64.DEFAULT)); ObjectInputStream in = new ObjectInputStream(byteIn); Serializable obj = (Serializable) in.readObject(); in.close(); return obj; } catch (IOException e) { e.printStackTrace(); return null; } catch (ClassNotFoundException e) { e.printStackTrace(); return null; } } private PreferencesHelper() {} }
/* Copyright 2012, Jernej Kovacic Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.jkovacic.ssh2; import com.jkovacic.cli.*; import com.jkovacic.cryptoutil.*; import java.io.*; import java.util.*; import ch.ethz.ssh2.*; /** * Implementation of Ssh2 using an open source (BSD style license) * library GanymedSSH2. More info: http://www.cleondris.ch/opensource/ssh2/ * * This implementation was developed on build 250. * * For public key user authentication, Ganymed SSH2 only supports RSA and DSA keys. * * @author Jernej Kovacic */ public final class SshGanymed extends Ssh2 { // Available algorithms, available via static methods of Connection: private static final String[] AVAILABLE_KEX_ALGS = { "diffie-hellman-group-exchange-sha1", "diffie-hellman-group14-sha1", "diffie-hellman-group1-sha1" }; private static final String[] AVAILABLE_HMAC_ALGS = Connection.getAvailableMACs(); private static final String[] AVAILABLE_CIPHER_ALGS = Connection.getAvailableCiphers(); private static final String[] AVAILABLE_PK_ALGS = Connection.getAvailableServerHostKeyAlgorithms(); // The library does not support compression private static final String[] AVAILABLE_COMP_ALGS = { "none" }; // Ganymed SSH connection context private Connection sshconn = null; /* * Constructor * * @param host - a class with SSH server data * @param user - user's data needed for authentication * @param algorithms - selected encryption algorithms */ SshGanymed(HostId host, UserCredentials user, EncryptionAlgorithms algorithms) { super(host, 
user, algorithms); // Supported algorithms, available via static methods of Connection: /* As evident from the source code, Ganymed SSH supports the following key exchange algorithms. However it does not allow to choose preferred algorithms. For that reason we will just check if any user selected KEX algorithm matches the available ones. */ availableKexAlgs = AVAILABLE_KEX_ALGS; // Hmac algorithms supported by the library availableHmacAlgs = AVAILABLE_HMAC_ALGS; // Cipher algorithms supported by the library availableCipherAlgs = AVAILABLE_CIPHER_ALGS; // Public key algorithms supported by the library availablePublickeyAlgs = AVAILABLE_PK_ALGS; // The library does not support compression availableCompAlgs = AVAILABLE_COMP_ALGS; } /* * Performs password based authentication * * @throws IOException (thrown by the library) if authentication fails */ private boolean authenticate(UserCredentialsPassword user) throws IOException { // the exception that will be thrown if necessary IOException exthr = null; // GanymedSSH2 requires a password as a String. // However, String is immutable so it is not possible to // overwrite its characters when they are not needed anymore. // Least we can do is to use StringBuffer (which is mutable) // and override its characters as soon as not needed anymore. StringBuilder password = new StringBuilder(user.secret.length); // authentication successful? boolean authSucc = false; try { // copy password characters into a String buffer for ( byte b : user.secret ) { password.append((char) b); } // And attempt to authenticate authSucc = sshconn.authenticateWithPassword(user.username, password.toString()); } catch ( IOException ex ) { exthr = ex; } finally { // before proceeding override the password with 'zero' characters for ( int i=0; i<user.secret.length; i++ ) { password.setCharAt(i, '\u0000'); } } // Has an exception been intercepted? 
Rethrow it if ( null!=exthr ) { throw exthr; } // and return the authentication success return authSucc; } /* * Performs public key based authentication * * @throws IOException (thrown by the library) if authentication fails */ private boolean authenticate(UserCredentialsPrivateKey user) throws IOException { String header = null; String footer = null; // the exception that will be thrown if necessary IOException exthr = null; // authentication successful? boolean authSucc = false; // storage for private key char[] pem = null; try { // GanymedSSH2 requires a PEM encoded private key. // Class Base64 has all appropriate functionality to prepare it. // First check if the algorithm is supported and // prepare the appropriate header and footer switch (user.getMethod()) { case DSA: header = "-----BEGIN DSA PRIVATE KEY-----"; footer = "-----END DSA PRIVATE KEY-----"; break; case RSA: header = "-----BEGIN RSA PRIVATE KEY-----"; footer = "-----END RSA PRIVATE KEY-----"; break; default: throw new IOException("Unsupported public key algorithm"); } // ... and prepare a PEM structure. // Note that the library is flexible about the line length and line separators pem = Base64.encode(user.secret, 64, "\n", header, footer); // And attempt to authenticate authSucc = sshconn.authenticateWithPublicKey(user.username, pem, null); } catch ( IOException ex ) { exthr = ex; } finally { // cleanup pem when not needed anymore!!!! if ( null!=pem ) { Arrays.fill(pem, '\u0000'); } } // Has an exception been intercepted? Rethrow it if ( null!=exthr ) { throw exthr; } return authSucc; } /** * Establish a connection to a SSH server * * @throws SshException if the connection fails for any reason */ public void connect() throws SshException { // thoroughly check all settings shortlistAndCheckAlgorithms(); checkUserSettings(); checkDestHostSettings(); // does an instance of Connection already exist? 
if ( null != sshconn && true == isConnected ) { // a connection already exists, nothing to do return; } try { // if an inactive connection exists, destroy it and // create a new one sshconn = null; isConnected = false; // Instantiate a Connection class... sshconn = new Connection(destination.hostname, destination.port); // and set user selected algorithms (where the library allows it) sshconn.setClient2ServerCiphers(cipherAlgs); sshconn.setServer2ClientCiphers(cipherAlgs); sshconn.setClient2ServerMACs(hmacAlgs); sshconn.setServer2ClientMACs(hmacAlgs); // GanymedSSH API does not allow to set KEX and compression algorithms // finally try to establish the connection sshconn.connect(new StrictHostkeyVerifier(destination.hostkeys)); } catch ( IOException ex ) { // Something failed throw new SshException("SSH connection failed: '" + ex.getMessage() + "'"); } try { // If reaching this point, the connection is established and key exchange was successful. // Now attempt to authenticate boolean userAuthSucc = false; // the auth. method depends on actual credentials if ( UserCredentialsPassword.class == user.getClass() ) { // is this auth. method available? if ( false == sshconn.isAuthMethodAvailable(user.username, "password") ) { throw new SshException("Password authentication not available for '" + user.username + "'"); } userAuthSucc = authenticate((UserCredentialsPassword) user); } else if ( UserCredentialsPrivateKey.class == user.getClass() ) { // is this auth. method available? 
if ( false == sshconn.isAuthMethodAvailable(user.username, "publickey") ) { throw new SshException("Public key authentication not available for '" + user.username + "'"); } userAuthSucc = authenticate((UserCredentialsPrivateKey) user); } // 'else' not necessary as userAuthSucc will be false and an exception will be thrown if ( false == userAuthSucc ) { // Authentication failed throw new SshException("SSH authentication unsuccessful"); } if ( false == sshconn.isAuthenticationComplete() ) { throw new SshException("User authentication still incomplete"); } } catch ( IOException ex ) { // thrown by GanymedSSH2 methods throw new SshException("SSH authentication failed: '" + ex.getMessage() + "'"); } // set the internal state isConnected = true; } /** * Terminate the SSH connection * * @throws SshException if it fails */ public void disconnect() throws SshException { if ( null != sshconn ) { sshconn.close(); } // Connection data not needed anymore, let the GC cleanup the structures: sshconn = null; isConnected = false; } /** * Execute a command over SSH 'exec' * * Note that some SSH servers may not return the remote process's exit code. * CliOutput.EXITCODE_NOT_SET is set in such cases. 
* * @param processor - a class that will process the command's outputs * @param command - full command to execute, given as one line * * @return an instance of CliOutput with results of the executed command * * @throws CliException when execution fails for any reason */ public CliOutput exec(ICliProcessor processor, String command) throws SshException { CliOutput retVal = null; Session sess = null; // check of input parameters if ( null == command || 0 == command.length() ) { throw new SshException("No command specified"); } // is connection established if ( null == sshconn || false == isConnected ) { throw new SshException("SSH connection not established"); } try { // Note that channel is called a "Session" by GanymedSSH2 // Its API for execution of commands is very simple sess = sshconn.openSession(); sess.execCommand(command); // get output try { retVal = processor.process(sess.getStdin(), new StreamGobbler(sess.getStdout()), new StreamGobbler(sess.getStderr()) ); } catch ( CliException ex ) { throw new SshException("Processing of output streams failed: " + ex.getMessage()); } // wait until the command execution completes sess.waitForCondition( ChannelCondition.CLOSED & ChannelCondition.EXIT_SIGNAL & ChannelCondition.EXIT_STATUS, 0); // command exit status is returned as Integer Integer exitStatus = sess.getExitStatus(); // some SSH servers may not return it, set to 0 in this case if ( null == exitStatus ) { retVal.exitCode = CliOutput.EXITCODE_NOT_SET; // default value when not available } else { retVal.exitCode = exitStatus.intValue(); } // and finally close the session sess.close(); } catch ( IOException ex ) { throw new SshException("Could not establish a SSH exec channel"); } return retVal; } /* * Destructor. 
* * It disconnects from a SSH server if the connection is still active */ protected void finalize() throws Throwable { try { disconnect(); } finally { super.finalize(); } } /** * @return list of key exchange algorithms supported by the library */ public static String[] availableKexAlgorithms() { return AVAILABLE_KEX_ALGS; } /** * @return list of symmetric cipher algorithms supported by the library */ public static String[] availableCipherAlgorithms() { return AVAILABLE_CIPHER_ALGS; } /** * @return list of message integrity algorithms supported by the library */ public static String[] availableHmacAlgorithms() { return AVAILABLE_HMAC_ALGS; } /** * @return list of asymmetric encryption algorithms supported by the library */ public static String[] availablePublicKeyAlgorithms() { return AVAILABLE_PK_ALGS; } /** * @return list of compression algorithms supported by the library */ public static String[] availableCompressionAlgorithms() { return AVAILABLE_COMP_ALGS; } /* * An internal class implementing GanymedSSH2's interface ServerHostKeyVerifier. * It is required by the library when authenticating a SSH server. * * This class compares either public keys or their hashes (MD5 or Bubble Babble), * depending on the HostkeyType of each known host key. * * As the application is designed for automated sessions, very strict check * is implemented, i.e. host keys or their hot finger prints must match any of user specified keys. * In any other case (a different host key, host key not found yet), the session will fail. */ class StrictHostkeyVerifier implements ServerHostKeyVerifier { HostkeyVerifier verifier = null; /* * constructor that initializes the internal list of host keys * * @param keylist */ StrictHostkeyVerifier(List<Hostkey> keylist) { verifier = new HostkeyVerifier(keylist); } /* * Implementation of the SeverHostKeyVerifier's (an interface provided by the GanymedSSH2 library) * only declared method. See Ganymed documentation for more info. 
* * @see ch.ethz.ssh2.ServerHostKeyVerifier#verifyServerHostKey(java.lang.String, int, java.lang.String, byte[]) */ public boolean verifyServerHostKey(String hostname, int port, String serverHostKeyAlgorithm, byte[] serverHostKey) throws Exception { // As the external host verifying class already contains keys // for the right destination hosts, // hostname and port will be ignored return verifier.strictVerify(serverHostKeyAlgorithm, serverHostKey); } } }
package com.cardshifter.server.main; import java.io.File; import java.io.IOException; import java.net.UnknownHostException; import java.nio.file.Paths; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.function.Predicate; import com.cardshifter.api.outgoing.*; import org.apache.log4j.PropertyConfigurator; import org.junit.After; import org.junit.Before; import org.junit.Test; import com.cardshifter.ai.AIs; import com.cardshifter.ai.ScoringAI; import com.cardshifter.api.ClientIO; import com.cardshifter.api.both.ChatMessage; import com.cardshifter.api.both.PlayerConfigMessage; import com.cardshifter.api.incoming.LoginMessage; import com.cardshifter.api.incoming.ServerQueryMessage; import com.cardshifter.api.incoming.ServerQueryMessage.Request; import com.cardshifter.api.incoming.StartGameRequest; import com.cardshifter.api.incoming.UseAbilityMessage; import com.cardshifter.api.outgoing.UserStatusMessage.Status; import com.cardshifter.core.game.ServerGame; import com.cardshifter.core.game.TCGGame; import com.cardshifter.modapi.actions.ECSAction; import com.cardshifter.modapi.ai.AIComponent; import com.cardshifter.modapi.ai.CardshifterAI; import com.cardshifter.modapi.base.ECSGameState; import com.cardshifter.modapi.base.Entity; import com.cardshifter.modapi.base.PlayerComponent; import com.cardshifter.server.model.MainServer; import com.cardshifter.server.model.Server; import static org.junit.Assert.*; public class ServerConnectionTest { private String getTestMod() { return mods.getMods()[0]; } private TestClient createTestClient() throws IOException { return new TestClient(socketPort); } private Server server; private int socketPort; private TestClient client1; private int userId; private AvailableModsMessage mods; @Before public void before() throws IOException, InterruptedException { startServer(new Server()); } private void startServer(Server serverInst) throws IOException, 
InterruptedException { PropertyConfigurator.configure(getClass().getResourceAsStream("log4j.properties")); ServerConfiguration config = ServerConfiguration.defaults(); // Use any available port config.setPortSocket(0); config.setPortWebsocket(0); MainServer main = new MainServer(config, serverInst); main.getMods().loadExternal(Paths.get("../extra-resources/groovy")); server = main.start(); assertTrue("Server should start correctly.", server.getClients().size() > 0); socketPort = config.getPortSocket(); client1 = createTestClient(); client1.send(new LoginMessage("Tester1")); WelcomeMessage welcome = client1.await(WelcomeMessage.class); System.out.println(server.getClients()); assertEquals(server.getClients().size() + 1, welcome.getUserId()); userId = welcome.getUserId(); client1.await(ChatMessage.class); mods = client1.await(AvailableModsMessage.class); assertNotEquals("No mods found in " + new File("").getAbsolutePath(), 0, mods.getMods().length); } @After public void shutdown() throws InterruptedException { try { client1.disconnect(); } catch (IOException e) { } server.stop(); } @Test(timeout = 20000) public void testUserOnlineOffline() throws InterruptedException, UnknownHostException, IOException { TestClient client2 = createTestClient(); client2.send(new LoginMessage("Test2")); client2.await(WelcomeMessage.class); client2.await(ChatMessage.class); UserStatusMessage statusMessage = client1.await(UserStatusMessage.class); ChatMessage chat = client1.await(ChatMessage.class); String message = chat.getMessage(); assertTrue("Unexpected message: " + message, message.contains(client2.getName()) && message.contains("joined")); int client2id = statusMessage.getUserId(); assertEquals(Status.ONLINE, statusMessage.getStatus()); assertEquals(server.getClients().size() + 1, client2id); assertEquals(client2.getName(), statusMessage.getName()); client2.send(new ServerQueryMessage(Request.USERS)); client2.await(AvailableModsMessage.class); List<UserStatusMessage> users = 
client2.awaitMany(6, UserStatusMessage.class); System.out.println("Online users: " + users); // There is no determined order in which the UserStatusMessages are received, so it is harder to make any assertions. assertUserFound(users, client1.getName()); assertUserFound(users, client2.getName()); assertUserFound(users, "AI Fighter"); assertUserFound(users, "AI Loser"); assertUserFound(users, "AI Medium"); assertUserFound(users, "AI Idiot"); client2.disconnect(); System.out.println(chat); statusMessage = client1.await(UserStatusMessage.class); assertEquals(Status.OFFLINE, statusMessage.getStatus()); assertEquals(client2id, statusMessage.getUserId()); assertEquals(client2.getName(), statusMessage.getName()); } @Test(timeout = 5000) public void testSameUserName() throws IOException, InterruptedException { TestClient client2 = createTestClient(); client2.send(new LoginMessage(client1.getName())); ErrorMessage message = client2.await(ErrorMessage.class); assertEquals(message.getMessage(), "User name already in use by another client"); } private static void assertUserFound(Collection<UserStatusMessage> users, String name) { assertTrue("User '" + name + "' not found", users.stream().filter(mess -> mess.getName().equals(name)).findAny().isPresent()); } @Test(timeout = 50000) public void testStartGame() throws InterruptedException, IOException { client1.send(new StartGameRequest(2, getTestMod())); NewGameMessage gameMessage = client1.await(NewGameMessage.class); assertEquals(1, gameMessage.getGameId()); client1.awaitUntil(PlayerConfigMessage.class); TCGGame game = (TCGGame) server.getGames().get(1); assertEquals(2, game.getGameModel().getEntitiesWithComponent(PlayerComponent.class).size()); assertTrue(game.hasPlayer(server.getClients().get(userId))); assertTrue(game.hasPlayer(server.getClients().get(2))); game.incomingPlayerConfig(new PlayerConfigMessage(game.getId(), getTestMod(), new HashMap<>()), server.getClients().get(2)); game.incomingPlayerConfig(new 
PlayerConfigMessage(game.getId(), getTestMod(), new HashMap<>()), server.getClients().get(userId)); client1.awaitUntil(ResetAvailableActionsMessage.class); assertEquals(ECSGameState.RUNNING, game.getState()); } @Test(timeout = 100000) public void testPlayGame() throws InterruptedException, IOException { testPlayAny(); client1.awaitUntil(PlayerConfigMessage.class); TCGGame game = (TCGGame) server.getGames().get(1); ClientIO io = server.getClients().get(userId); assertEquals(2, game.getGameModel().getEntitiesWithComponent(PlayerComponent.class).size()); game.incomingPlayerConfig(new PlayerConfigMessage(game.getId(), getTestMod(), new HashMap<>()), io); assertEquals(ECSGameState.RUNNING, game.getGameModel().getGameState()); Entity human = game.playerFor(io); Entity ai = game.getGameModel().getEntitiesWithComponent(AIComponent.class).stream().findFirst().get(); ai.getComponent(AIComponent.class).setDelay(0); CardshifterAI humanActions = new ScoringAI(AIs.medium()); client1.awaitUntil(ResetAvailableActionsMessage.class); client1.awaitUntil(ResetAvailableActionsMessage.class); client1.awaitUntil(UsableActionMessage.class); while (!game.isGameOver()) { ECSAction action = humanActions.getAction(human); if (action != null) { System.out.println("Perform " + action); int[] targets = new int[]{ }; if (!action.getTargetSets().isEmpty()) { targets = action.getTargetSets().get(0).getChosenTargets().stream().mapToInt(e -> e.getId()).toArray(); } UseAbilityMessage message = new UseAbilityMessage(game.getId(), action.getOwner().getId(), action.getName(), targets); System.out.println("Sending message: " + message); client1.send(message); client1.awaitUntil(ResetAvailableActionsMessage.class); } else { System.out.println("Nothing to perform, busy-loop"); } } } @Test(timeout = 10000) public void testPlayAny() throws InterruptedException, IOException { Predicate<ClientIO> opponentFilter = client -> client.getName().equals("AI Loser"); server.getIncomingHandler().perform(new 
StartGameRequest(-1, getTestMod()), server.getClients().values().stream().filter(opponentFilter).findAny().get()); client1.send(new StartGameRequest(-1, getTestMod())); NewGameMessage gameMessage = client1.await(NewGameMessage.class); assertEquals(1, gameMessage.getGameId()); ServerGame game = server.getGames().get(1); assertTrue(game.hasPlayer(server.getClients().get(userId))); } @Test(timeout = 10000) public void testOnlyOneInvite() throws IOException, InterruptedException { TestClient client2 = createTestClient(); client2.send(new LoginMessage("client2")); WelcomeMessage welcomeMessage = client2.await(WelcomeMessage.class); int client2id = welcomeMessage.getUserId(); client1.await(UserStatusMessage.class); client1.await(ChatMessage.class); client1.send(new StartGameRequest(client2id, getTestMod())); client1.send(new StartGameRequest(client2id, getTestMod())); client1.await(ErrorMessage.class); } /** * Assert that no message is in the queue or being sent from the server. * DOES NOT WORK IF THE MESSAGE IN THE QUEUE IS ServerStatusMessage! 
*/ private static void assertNoMessage(TestClient client) throws IOException, InterruptedException { client.send(new ServerQueryMessage(Request.STATUS, "")); client.await(ServerStatusMessage.class); } @Test(timeout = 5000) public void testUsersQueryNotLoggedIn() throws IOException, InterruptedException { TestClient client2 = createTestClient(); client1.send(new ServerQueryMessage(Request.USERS, "")); List<UserStatusMessage> users = client1.awaitMany(5, UserStatusMessage.class); assertUserFound(users, client1.getName()); assertUserFound(users, "AI Fighter"); assertUserFound(users, "AI Loser"); assertUserFound(users, "AI Medium"); assertUserFound(users, "AI Idiot"); // There shouldn't be a UserStatusMessage for client2 assertNoMessage(client1); } @Test(timeout = 5000) public void testNoOfflineMessageIfNotLoggedIn() throws IOException, InterruptedException { /* There's a race condition between a possible UserStatusMessage (if the test were to fail) and the * message sent by the server in response to the request from assertNoMessage. Therefore wait until * the server has processed the disconnection before continuing. * * See https://github.com/Cardshifter/Cardshifter/pull/410#discussion-diff-40526060 for discussion. */ shutdown(); class TestServer extends Server { private CountDownLatch latch; @Override public void onDisconnected(ClientIO client) { super.onDisconnected(client); if (latch != null) { latch.countDown(); } } public void setLatch(CountDownLatch latch) { this.latch = latch; } } CountDownLatch disconnectionLatch = new CountDownLatch(1); startServer(new TestServer()); ((TestServer) server).setLatch(disconnectionLatch); TestClient client2 = createTestClient(); client2.disconnect(); disconnectionLatch.await(); assertNoMessage(client1); } }
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.ecs.model;

import java.io.Serializable;

/**
 * Result object for an ECS DescribeServices call: the services that were
 * described plus any per-item failures reported by the service.
 */
public class DescribeServicesResult implements Serializable, Cloneable {

    /**
     * <p>
     * The list of services described.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Service> services;
    /**
     * <p>
     * Any failures associated with the call.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Failure> failures;

    /**
     * Returns the list of services described, lazily creating an empty list on
     * first access so callers never see {@code null}.
     *
     * @return The list of services described.
     */
    public java.util.List<Service> getServices() {
        if (services != null) {
            return services;
        }
        services = new com.amazonaws.internal.SdkInternalList<Service>();
        return services;
    }

    /**
     * Replaces the list of services described with a copy of the given
     * collection ({@code null} clears the list).
     *
     * @param services
     *        The list of services described.
     */
    public void setServices(java.util.Collection<Service> services) {
        this.services = (services == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Service>(services);
    }

    /**
     * Appends the given services to the existing list (if any), creating the
     * list on demand. Use {@link #setServices(java.util.Collection)} or
     * {@link #withServices(java.util.Collection)} to override existing values.
     *
     * @param services
     *        The list of services described.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeServicesResult withServices(Service... services) {
        if (this.services == null) {
            this.services = new com.amazonaws.internal.SdkInternalList<Service>(services.length);
        }
        for (Service service : services) {
            this.services.add(service);
        }
        return this;
    }

    /**
     * Replaces the list of services described (fluent variant of
     * {@link #setServices(java.util.Collection)}).
     *
     * @param services
     *        The list of services described.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeServicesResult withServices(java.util.Collection<Service> services) {
        setServices(services);
        return this;
    }

    /**
     * Returns any failures associated with the call, lazily creating an empty
     * list on first access so callers never see {@code null}.
     *
     * @return Any failures associated with the call.
     */
    public java.util.List<Failure> getFailures() {
        if (failures != null) {
            return failures;
        }
        failures = new com.amazonaws.internal.SdkInternalList<Failure>();
        return failures;
    }

    /**
     * Replaces the failures associated with the call with a copy of the given
     * collection ({@code null} clears the list).
     *
     * @param failures
     *        Any failures associated with the call.
     */
    public void setFailures(java.util.Collection<Failure> failures) {
        this.failures = (failures == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<Failure>(failures);
    }

    /**
     * Appends the given failures to the existing list (if any), creating the
     * list on demand. Use {@link #setFailures(java.util.Collection)} or
     * {@link #withFailures(java.util.Collection)} to override existing values.
     *
     * @param failures
     *        Any failures associated with the call.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeServicesResult withFailures(Failure... failures) {
        if (this.failures == null) {
            this.failures = new com.amazonaws.internal.SdkInternalList<Failure>(failures.length);
        }
        for (Failure failure : failures) {
            this.failures.add(failure);
        }
        return this;
    }

    /**
     * Replaces the failures associated with the call (fluent variant of
     * {@link #setFailures(java.util.Collection)}).
     *
     * @param failures
     *        Any failures associated with the call.
     * @return Returns a reference to this object so that method calls can be
     *         chained together.
     */
    public DescribeServicesResult withFailures(java.util.Collection<Failure> failures) {
        setFailures(failures);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder out = new StringBuilder("{");
        if (getServices() != null) {
            out.append("Services: ").append(getServices()).append(",");
        }
        if (getFailures() != null) {
            out.append("Failures: ").append(getFailures());
        }
        return out.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DescribeServicesResult)) {
            return false;
        }
        DescribeServicesResult that = (DescribeServicesResult) obj;
        // Both members must agree on null-ness and, when present, on content.
        if ((this.getServices() == null) != (that.getServices() == null)) {
            return false;
        }
        if (this.getServices() != null && !this.getServices().equals(that.getServices())) {
            return false;
        }
        if ((this.getFailures() == null) != (that.getFailures() == null)) {
            return false;
        }
        if (this.getFailures() != null && !this.getFailures().equals(that.getFailures())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = 1;
        result = 31 * result + (getServices() == null ? 0 : getServices().hashCode());
        result = 31 * result + (getFailures() == null ? 0 : getFailures().hashCode());
        return result;
    }

    @Override
    public DescribeServicesResult clone() {
        try {
            return (DescribeServicesResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!",
                    e);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tom_roush.pdfbox.pdmodel.interactive.digitalsignature; import java.util.ArrayList; import java.util.List; import com.tom_roush.pdfbox.cos.COSArray; import com.tom_roush.pdfbox.cos.COSBase; import com.tom_roush.pdfbox.cos.COSDictionary; import com.tom_roush.pdfbox.cos.COSName; import com.tom_roush.pdfbox.pdmodel.common.COSArrayList; import com.tom_roush.pdfbox.pdmodel.common.COSObjectable; /** * This represents a pdf signature seed value dictionary. * * @author Thomas Chojecki */ public class PDSeedValue implements COSObjectable { /** * A Ff flag. */ public static final int FLAG_FILTER = 1; /** * A Ff flag. */ public static final int FLAG_SUBFILTER = 1 << 1; /** * A Ff flag. */ public static final int FLAG_V = 1 << 2; /** * A Ff flag. */ public static final int FLAG_REASON = 1 << 3; /** * A Ff flag. */ public static final int FLAG_LEGAL_ATTESTATION = 1 << 4; /** * A Ff flag. */ public static final int FLAG_ADD_REV_INFO = 1 << 5; /** * A Ff flag. */ public static final int FLAG_DIGEST_METHOD = 1 << 6; private final COSDictionary dictionary; /** * Default constructor. 
*/ public PDSeedValue() { dictionary = new COSDictionary(); dictionary.setItem(COSName.TYPE, COSName.SV); dictionary.setDirect(true); // the specification claim to use direct objects } /** * Constructor. * * @param dict The signature dictionary. */ public PDSeedValue(COSDictionary dict) { dictionary = dict; dictionary.setDirect(true); // the specification claim to use direct objects } /** * Convert this standard java object to a COS dictionary. * * @return The COS dictionary that matches this Java object. */ @Override public COSDictionary getCOSObject() { return dictionary; } /** * * @return true if the Filter is required */ public boolean isFilterRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_FILTER); } /** * set true if the filter shall be required. * * @param flag if true, the specified Filter shall be used when signing. */ public void setFilterRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_FILTER, flag); } /** * * @return true if the SubFilter is required */ public boolean isSubFilterRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_SUBFILTER); } /** * set true if the subfilter shall be required. * * @param flag if true, the first supported SubFilter in the array shall be used when signing. */ public void setSubFilterRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_SUBFILTER, flag); } /** * * @return true if the DigestMethod is required */ public boolean isDigestMethodRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_DIGEST_METHOD); } /** * set true if the DigestMethod shall be required. * * @param flag if true, one digest from the array shall be used. */ public void setDigestMethodRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_DIGEST_METHOD, flag); } /** * * @return true if the V entry is required */ public boolean isVRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_V); } /** * set true if the V entry shall be required. 
* * @param flag if true, the V entry shall be used. */ public void setVRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_V, flag); } /** * * @return true if the Reason is required */ public boolean isReasonRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_REASON); } /** * set true if the Reason shall be required. * * @param flag if true, the Reason entry shall be used. */ public void setReasonRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_REASON, flag); } /** * * @return true if the LegalAttestation is required */ public boolean isLegalAttestationRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_LEGAL_ATTESTATION); } /** * set true if the LegalAttestation shall be required. * * @param flag if true, the LegalAttestation entry shall be used. */ public void setLegalAttestationRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_LEGAL_ATTESTATION, flag); } /** * * @return true if the AddRevInfo is required */ public boolean isAddRevInfoRequired() { return getCOSObject().getFlag( COSName.FF, FLAG_ADD_REV_INFO); } /** * set true if the AddRevInfo shall be required. * * @param flag if true, the AddRevInfo shall be used. */ public void setAddRevInfoRequired(boolean flag) { getCOSObject().setFlag( COSName.FF, FLAG_ADD_REV_INFO, flag); } /** * If <b>Filter</b> is not null and the {@link #isFilterRequired()} indicates this entry is a * required constraint, then the signature handler specified by this entry shall be used when * signing; otherwise, signing shall not take place. If {@link #isFilterRequired()} indicates * that this is an optional constraint, this handler may be used if it is available. If it is * not available, a different handler may be used instead. * * @return the filter that shall be used by the signature handler */ public String getFilter() { return dictionary.getNameAsString(COSName.FILTER); } /** * (Optional) The signature handler that shall be used to sign the signature field. 
* * @param filter is the filter that shall be used by the signature handler */ public void setFilter(COSName filter) { dictionary.setItem(COSName.FILTER, filter); } /** * If <b>SubFilter</b> is not null and the {@link #isSubFilterRequired()} indicates this * entry is a required constraint, then the first matching encodings shall be used when * signing; otherwise, signing shall not take place. If {@link #isSubFilterRequired()} * indicates that this is an optional constraint, then the first matching encoding shall * be used if it is available. If it is not available, a different encoding may be used * instead. * * @return the subfilter that shall be used by the signature handler */ public List<String> getSubFilter() { List<String> retval = null; COSArray fields = (COSArray)dictionary.getDictionaryObject(COSName.SUB_FILTER); if (fields != null) { List<String> actuals = new ArrayList<String>(); for ( int i = 0; i < fields.size(); i++ ) { String element = fields.getName(i); if (element != null) { actuals.add(element); } } retval = new COSArrayList<String>(actuals, fields); } return retval; } /** * (Optional) An array of names indicating encodings to use when signing. The first name * in the array that matches an encoding supported by the signature handler shall be the * encoding that is actually used for signing. * * @param subfilter is the name that shall be used for encoding */ public void setSubFilter(List<COSName> subfilter) { dictionary.setItem(COSName.SUB_FILTER, COSArrayList.converterToCOSArray(subfilter)); } /** * An array of names indicating acceptable digest algorithms to use when * signing. The value shall be one of <b>SHA1</b>, <b>SHA256</b>, <b>SHA384</b>, * <b>SHA512</b>, <b>RIPEMD160</b>. The default value is implementation-specific. 
* * @return the digest method that shall be used by the signature handler */ public List<String> getDigestMethod() { List<String> retval = null; COSArray fields = (COSArray)dictionary.getDictionaryObject(COSName.DIGEST_METHOD); if (fields != null) { List<String> actuals = new ArrayList<String>(); for ( int i = 0; i < fields.size(); i++ ) { String element = fields.getName(i); if (element != null) { actuals.add(element); } } retval = new COSArrayList<String>(actuals, fields); } return retval; } /** * <p>(Optional, PDF 1.7) An array of names indicating acceptable digest * algorithms to use when signing. The value shall be one of <b>SHA1</b>, * <b>SHA256</b>, <b>SHA384</b>, <b>SHA512</b>, <b>RIPEMD160</b>. The default * value is implementation-specific.</p> * * <p>This property is only applicable if the digital credential signing contains RSA * public/privat keys</p> * * @param digestMethod is a list of possible names of the digests, that should be * used for signing. */ public void setDigestMethod(List<COSName> digestMethod) { // integrity check for ( COSName cosName : digestMethod ) { if (!(cosName.equals(COSName.DIGEST_SHA1) || cosName.equals(COSName.DIGEST_SHA256) || cosName.equals(COSName.DIGEST_SHA384) || cosName.equals(COSName.DIGEST_SHA512) || cosName.equals(COSName.DIGEST_RIPEMD160))) { throw new IllegalArgumentException("Specified digest " + cosName.getName() + " isn't allowed."); } } dictionary.setItem(COSName.DIGEST_METHOD, COSArrayList.converterToCOSArray(digestMethod)); } /** * The minimum required capability of the signature field seed value * dictionary parser. A value of 1 specifies that the parser shall be able to * recognize all seed value dictionary entries in a PDF 1.5 file. A value of 2 * specifies that it shall be able to recognize all seed value dictionary entries * specified. 
* * @return the minimum required capability of the signature field seed value * dictionary parser */ public float getV() { return dictionary.getFloat(COSName.V); } /** * (Optional) The minimum required capability of the signature field seed value * dictionary parser. A value of 1 specifies that the parser shall be able to * recognize all seed value dictionary entries in a PDF 1.5 file. A value of 2 * specifies that it shall be able to recognize all seed value dictionary entries * specified. * * @param minimumRequiredCapability is the minimum required capability of the * signature field seed value dictionary parser */ public void setV(float minimumRequiredCapability) { dictionary.setFloat(COSName.V, minimumRequiredCapability); } /** * If the Reasons array is provided and {@link #isReasonRequired()} indicates that * Reasons is a required constraint, one of the reasons in the array shall be used * for the signature dictionary; otherwise signing shall not take place. If the * {@link #isReasonRequired()} indicates Reasons is an optional constraint, one of * the reasons in the array may be chose or a custom reason can be provided. * * @return the reasons that should be used by the signature handler */ public List<String> getReasons() { List<String> retval = null; COSArray fields = (COSArray)dictionary.getDictionaryObject(COSName.REASONS); if (fields != null) { List<String> actuals = new ArrayList<String>(); for ( int i = 0; i < fields.size(); i++ ) { String element = fields.getString(i); if (element != null) { actuals.add(element); } } retval = new COSArrayList<String>(actuals, fields); } return retval; } /** * (Optional) An array of text strings that specifying possible reasons for signing * a document. If specified, the reasons supplied in this entry replace those used * by conforming products. 
* * @param reasons is a list of possible text string that specifying possible reasons * * @deprecated use {@link #setReasons(java.util.List) } */ @Deprecated public void setReasonsd(List<String> reasons) { setReasons(reasons); } /** * (Optional) An array of text strings that specifying possible reasons for signing * a document. If specified, the reasons supplied in this entry replace those used * by conforming products. * * @param reasons is a list of possible text string that specifying possible reasons */ public void setReasons(List<String> reasons) { dictionary.setItem(COSName.REASONS, COSArrayList.converterToCOSArray(reasons)); } /** * <p>(Optional; PDF 1.6) A dictionary containing a single entry whose key is P * and whose value is an integer between 0 and 3. A value of 0 defines the * signatures as an author signature. The value 1 through 3 shall be used for * certification signatures and correspond to the value of P in a DocMDP transform * parameters dictionary.</p> * * <p>If this MDP key is not present or the MDP dictionary does not contain a P * entry, no rules shall be defined regarding the type of signature or its * permissions.</p> * * @return the mdp dictionary as PDSeedValueMDP */ public PDSeedValueMDP getMDP() { COSDictionary dict = dictionary.getCOSDictionary(COSName.MDP); PDSeedValueMDP mdp = null; if (dict != null) { mdp = new PDSeedValueMDP(dict); } return mdp; } /** * <p>(Optional; PDF 1.6) A dictionary containing a single entry whose key is P * and whose value is an integer between 0 and 3. A value of 0 defines the * signatures as an author signature. 
The value 1 through 3 shall be used for * certification signatures and correspond to the value of P in a DocMDP transform * parameters dictionary.</p> * * <p>If this MDP key is not present or the MDP dictionary does not contain a P * entry, no rules shall be defined regarding the type of signature or its * permissions.</p> * * @param mdp dictionary */ public void setMPD(PDSeedValueMDP mdp) { if (mdp != null) { dictionary.setItem(COSName.MDP, mdp.getCOSObject()); } } /** * (Optional) A certificate seed value dictionary containing information about the certificate * to be used when signing. * * @return dictionary */ public PDSeedValueCertificate getSeedValueCertificate() { COSBase base = dictionary.getDictionaryObject(COSName.CERT); PDSeedValueCertificate certificate = null; if (base instanceof COSDictionary) { COSDictionary dict = (COSDictionary) base; certificate = new PDSeedValueCertificate(dict); } return certificate; } /** * (Optional) A certificate seed value dictionary containing information about the certificate * to be used when signing. * * @param certificate dictionary */ public void setSeedValueCertificate(PDSeedValueCertificate certificate) { dictionary.setItem(COSName.CERT, certificate); } /** * <p>(Optional; PDF 1.6) A time stamp dictionary containing two entries. URL which * is a ASCII string specifying the URL to a rfc3161 conform timestamp server and Ff * to indicate if a timestamp is required or optional.</p> * * @return the timestamp dictionary as PDSeedValueTimeStamp */ public PDSeedValueTimeStamp getTimeStamp() { COSDictionary dict = dictionary.getCOSDictionary(COSName.TIME_STAMP); PDSeedValueTimeStamp timestamp = null; if (dict != null) { timestamp = new PDSeedValueTimeStamp(dict); } return timestamp; } /** * <p>(Optional; PDF 1.6) A time stamp dictionary containing two entries. 
URL which * is a ASCII string specifying the URL to a rfc3161 conform timestamp server and Ff * to indicate if a timestamp is required or optional.</p> * * @param timestamp dictionary */ public void setTimeStamp(PDSeedValueTimeStamp timestamp) { if (timestamp != null) { dictionary.setItem(COSName.TIME_STAMP, timestamp.getCOSObject()); } } /** * (Optional, PDF 1.6) An array of text strings that specifying possible legal * attestations. * * @return the reasons that should be used by the signature handler */ public List<String> getLegalAttestation() { List<String> retval = null; COSArray fields = (COSArray)dictionary.getDictionaryObject(COSName.LEGAL_ATTESTATION); if (fields != null) { List<String> actuals = new ArrayList<String>(); for ( int i = 0; i < fields.size(); i++ ) { String element = fields.getString(i); if (element != null) { actuals.add(element); } } retval = new COSArrayList<String>(actuals, fields); } return retval; } /** * (Optional, PDF 1.6) An array of text strings that specifying possible legal * attestations. * * @param legalAttestation is a list of possible text string that specifying possible * legal attestations. */ public void setLegalAttestation(List<String> legalAttestation) { dictionary.setItem(COSName.LEGAL_ATTESTATION, COSArrayList.converterToCOSArray(legalAttestation)); } }
/**
 * Copyright 2012 Tobias Gierke <tobias.gierke@code-sourcery.de>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package de.codesourcery.springmass.math;

import java.awt.Color;
import java.text.DecimalFormat;

/**
 * Mutable 4-component vector (x, y, z, w) used for 3D math with homogeneous coordinates.
 *
 * <p>Most arithmetic operations act on the x/y/z components only and carry the w component
 * through unchanged (see the individual methods). The class doubles as an RGBA color holder
 * via the r()/g()/b()/a() aliases.</p>
 *
 * <p>NOTE: instances are mutable; using them as keys in hash-based collections is only safe
 * as long as they are not modified while stored.</p>
 */
public final class Vector4
{
    public double x;
    public double y;
    public double z;
    public double w;

    public static final Vector4 UP             = new Vector4( 0, 1, 0); // +Y
    public static final Vector4 DOWN           = new Vector4( 0,-1, 0); // -Y
    public static final Vector4 LEFT           = new Vector4(-1, 0, 0); // -X
    public static final Vector4 RIGHT          = new Vector4( 1, 0, 0); // +X
    public static final Vector4 INTO_VIEWPLANE = new Vector4( 0, 0,-1); // -Z
    public static final Vector4 OUTOF_VIEWPLANE= new Vector4( 0, 0, 1); // +Z

    /** Copy constructor (copies all four components). */
    public Vector4(Vector4 input)
    {
        this.x = input.x;
        this.y = input.y;
        this.z = input.z;
        this.w = input.w;
    }

    /**
     * Exact component-wise equality (uses {@code ==} on doubles, so NaN components are never
     * equal and +0.0 equals -0.0).
     */
    @Override
    public boolean equals(Object obj)
    {
        if ( obj != null && obj.getClass() == Vector4.class )
        {
            Vector4 o = (Vector4) obj;
            return this.x == o.x && this.y == o.y && this.z == o.z && this.w == o.w;
        }
        return false;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}.
     *
     * <p>Added because {@code equals} was overridden without {@code hashCode}, violating the
     * {@link Object#hashCode()} contract. Each component is normalized with {@code + 0.0} so
     * that -0.0 and +0.0 (which compare equal under {@code ==}) hash identically.</p>
     */
    @Override
    public int hashCode()
    {
        long bits = Double.doubleToLongBits( x + 0.0 );
        int result = (int) (bits ^ (bits >>> 32));
        bits = Double.doubleToLongBits( y + 0.0 );
        result = 31 * result + (int) (bits ^ (bits >>> 32));
        bits = Double.doubleToLongBits( z + 0.0 );
        result = 31 * result + (int) (bits ^ (bits >>> 32));
        bits = Double.doubleToLongBits( w + 0.0 );
        result = 31 * result + (int) (bits ^ (bits >>> 32));
        return result;
    }

    /** Writes this vector's four components into {@code other}. */
    public void copyInto(Vector4 other)
    {
        other.x = x;
        other.y = y;
        other.z = z;
        other.w = w;
    }

    /**
     * Approximate equality: all four component differences must be strictly below
     * {@code epsilon}.
     */
    public boolean equals(Object obj, double epsilon)
    {
        if ( obj != null && obj.getClass() == Vector4.class )
        {
            Vector4 o = (Vector4) obj;
            return Math.abs( this.x - o.x ) < epsilon &&
                   Math.abs( this.y - o.y ) < epsilon &&
                   Math.abs( this.z - o.z ) < epsilon &&
                   Math.abs( this.w - o.w ) < epsilon;
        }
        return false;
    }

    /**
     * Converts an AWT color to a vector with components scaled to [0,1].
     *
     * <p>NOTE: the alpha channel of {@code c} is discarded; w defaults to 1.</p>
     */
    public static Vector4 valueOf(Color c)
    {
        return new Vector4( c.getRed() / 255.0 , c.getGreen() / 255.0 , c.getBlue() / 255.0 );
    }

    /**
     * Packs r()/g()/b() (interpreted as [0,1] values) into a 0xRRGGBB integer,
     * clamping each channel to [0,255].
     */
    public int toRGB()
    {
        int r = (int) Math.max( 0 , Math.min(r()*255f,255) );
        int g = (int) Math.max( 0 , Math.min(g()*255f,255) );
        int b = (int) Math.max( 0 , Math.min(b()*255f,255) );
        int color = r << 16 | g << 8 | b;
        return color;
    }

    /** Converts this vector (treated as a color) to an opaque AWT {@link Color}. */
    public Color toColor()
    {
        return new Color( toRGB() );
    }

    /** Zero vector (all components 0, including w). */
    public Vector4()
    {
    }

    /** Creates a vector from the first four elements of {@code data}. */
    public Vector4(double[] data)
    {
        this.x = data[0];
        this.y = data[1];
        this.z = data[2];
        this.w = data[3];
    }

    /** Reads four components from {@code data} starting at {@code offset}. */
    public void setData(double[] data, int offset)
    {
        this.x = data[offset];
        this.y = data[offset+1];
        this.z = data[offset+2];
        this.w = data[offset+3];
    }

    /** Copies all four components from {@code other}. */
    public void copyFrom(Vector4 other)
    {
        this.x = other.x;
        this.y = other.y;
        this.z = other.z;
        this.w = other.w;
    }

    /** Sets x/y/z; w is left unchanged. */
    public void set(double x, double y, double z)
    {
        this.x = x;
        this.y = y;
        this.z = z;
    }

    /**
     * Zeroes x, y and z; w is left unchanged.
     *
     * <p>NOTE: the method name transposes Z and Y ("XZY") — kept for caller compatibility.</p>
     */
    public void setXZYToZero()
    {
        this.x = 0;
        this.y = 0;
        this.z = 0;
    }

    /** Writes four components into {@code array} starting at {@code startingOffset}. */
    public void copyInto(double[] array, int startingOffset)
    {
        array[startingOffset]   = x;
        array[startingOffset+1] = y;
        array[startingOffset+2] = z;
        array[startingOffset+3] = w;
    }

    /** Creates a vector from four elements of {@code data} starting at {@code offset}. */
    public Vector4(double[] data, int offset)
    {
        this.x = data[offset];
        this.y = data[offset+1];
        this.z = data[offset+2];
        this.w = data[offset+3];
    }

    /** Returns a new vector with x/y/z negated; w is preserved. */
    public Vector4 flip()
    {
        return new Vector4(-x, -y, -z, w);
    }

    /** Negates x/y/z in place; w is preserved (consistent with {@link #flip()}). */
    public void flipInPlace()
    {
        this.x = -x;
        this.y = -y;
        this.z = -z;
    }

    /** Exact component-wise comparison against another vector (no null check). */
    public boolean isEquals(Vector4 other)
    {
        return this.x == other.x && this.y == other.y && this.z == other.z && this.w == other.w;
    }

    // --- component setters (r/g/b/a are color-space aliases of x/y/z/w) ---

    public void x(double value) { this.x = value; }
    public void r(double value) { this.x = value; }
    public void y(double value) { this.y = value; }
    public void g(double value) { this.y = value; }
    public void z(double value) { this.z = value; }
    public void b(double value) { this.z = value; }
    public void w(double value) { this.w = value; }
    public void a(double value) { this.w = value; }

    // --- component getters ---

    public double x() { return x; }
    public double r() { return x; }
    public double y() { return y; }
    public double g() { return y; }
    public double z() { return z; }
    public double b() { return z; }
    public double w() { return w; }
    public double a() { return w; }

    /** Component-wise subtraction of x/y/z; the result keeps this vector's w. */
    public Vector4 minus(Vector4 other)
    {
        return new Vector4( this.x - other.x , this.y - other.y , this.z - other.z , this.w );
    }

    /** In-place subtraction of x/y/z; w is unchanged. */
    public void minusInPlace(Vector4 other)
    {
        this.x = this.x - other.x;
        this.y = this.y - other.y;
        this.z = this.z - other.z;
    }

    /** Euclidean distance between the x/y/z parts of this vector and {@code point}. */
    public double distanceTo(Vector4 point)
    {
        double x = this.x - point.x;
        double y = this.y - point.y;
        double z = this.z - point.z;
        return Math.sqrt( x*x + y*y + z*z );
    }

    /** Squared Euclidean distance (x/y/z only); avoids the sqrt of {@link #distanceTo}. */
    public double distanceSquaredTo(Vector4 point)
    {
        double x = this.x - point.x;
        double y = this.y - point.y;
        double z = this.z - point.z;
        return x*x + y*y + z*z;
    }

    /** Alias of {@link #distanceSquaredTo(Vector4)} kept for caller compatibility. */
    public double squaredDistanceTo(Vector4 point)
    {
        double x = this.x - point.x;
        double y = this.y - point.y;
        double z = this.z - point.z;
        return x*x + y*y + z*z;
    }

    /** Component-wise addition of x/y/z; the result keeps this vector's w. */
    public Vector4 plus(Vector4 other)
    {
        return new Vector4( this.x + other.x , this.y + other.y , this.z + other.z , w );
    }

    /** Adds two vectors to this one (x/y/z only); the result keeps this vector's w. */
    public Vector4 plus(Vector4 v1, Vector4 v2)
    {
        return new Vector4( this.x + v1.x + v2.x , this.y + v1.y + v2.y , this.z + v1.z + v2.z , w );
    }

    /** In-place addition of two vectors (x/y/z only); w is unchanged. */
    public void plusInPlace(Vector4 v1, Vector4 v2)
    {
        this.x = this.x + v1.x + v2.x;
        this.y = this.y + v1.y + v2.y;
        this.z = this.z + v1.z + v2.z;
    }

    /** In-place addition (x/y/z only); w is unchanged. */
    public void plusInPlace(Vector4 other)
    {
        this.x = this.x + other.x;
        this.y = this.y + other.y;
        this.z = this.z + other.z;
    }

    /** Point constructor: w defaults to 1 (homogeneous position). */
    public Vector4(double x, double y, double z)
    {
        this(x, y, z, 1);
    }

    /** Zeroes x, y and z; w is left unchanged. */
    public void setToZero()
    {
        this.x = this.y = this.z = 0;
    }

    public Vector4(double x, double y, double z, double w)
    {
        this.x = x;
        this.y = y;
        this.z = z;
        this.w = w;
    }

    /** Component-wise minimum over x/y/z of all inputs; the result's w defaults to 1. */
    public static Vector4 min(Vector4... vectors)
    {
        double minX = Double.POSITIVE_INFINITY;
        double minY = Double.POSITIVE_INFINITY;
        double minZ = Double.POSITIVE_INFINITY;
        for ( Vector4 v : vectors )
        {
            minX = Math.min( minX , v.x );
            minY = Math.min( minY , v.y );
            minZ = Math.min( minZ , v.z );
        }
        return new Vector4(minX, minY, minZ);
    }

    /** Component-wise maximum over x/y/z of all inputs; the result's w defaults to 1. */
    public static Vector4 max(Vector4... vectors)
    {
        double maxX = Double.NEGATIVE_INFINITY;
        double maxY = Double.NEGATIVE_INFINITY;
        double maxZ = Double.NEGATIVE_INFINITY;
        for ( Vector4 v : vectors )
        {
            maxX = Math.max(maxX, v.x);
            maxY = Math.max(maxY, v.y);
            maxZ = Math.max(maxZ, v.z);
        }
        return new Vector4(maxX, maxY, maxZ);
    }

    /**
     * Multiplies this (row) vector by a 4x4 matrix stored row-major in {@code matrix.data}.
     */
    public Vector4 multiply(Matrix matrix)
    {
        final double[] matrixData = matrix.data;

        double nx = x * matrixData[0]  + y * matrixData[1]  + z * matrixData[2]  + w * matrixData[3];
        double ny = x * matrixData[4]  + y * matrixData[5]  + z * matrixData[6]  + w * matrixData[7];
        double nz = x * matrixData[8]  + y * matrixData[9]  + z * matrixData[10] + w * matrixData[11];
        double nw = x * matrixData[12] + y * matrixData[13] + z * matrixData[14] + w * matrixData[15];

        return new Vector4( nx, ny, nz, nw );
    }

    /** Scales x/y/z by {@code value}; the result keeps this vector's w. */
    public Vector4 multiply(double value)
    {
        return new Vector4( x * value , y * value , z * value , w );
    }

    /** Returns {@code this * toMultiply + toAdd} (x/y/z only); the result keeps this vector's w. */
    public Vector4 multiplyAdd(double toMultiply, Vector4 toAdd)
    {
        return new Vector4( x * toMultiply + toAdd.x ,
                            y * toMultiply + toAdd.y ,
                            z * toMultiply + toAdd.z , w );
    }

    /** In-place fused multiply-add on x/y/z; w is unchanged. */
    public void multiplyAddInPlace(double toMultiply, Vector4 toAdd)
    {
        this.x = x * toMultiply + toAdd.x;
        this.y = y * toMultiply + toAdd.y;
        this.z = z * toMultiply + toAdd.z;
    }

    /** Component-wise multiply-add on x/y/z; the result keeps this vector's w. */
    public Vector4 multiplyAdd(Vector4 toMultiply, Vector4 toAdd)
    {
        return new Vector4( x * toMultiply.x + toAdd.x ,
                            y * toMultiply.y + toAdd.y ,
                            z * toMultiply.z + toAdd.z , w );
    }

    /** In-place scaling of x/y/z; w is unchanged. */
    public void multiplyInPlace(double value)
    {
        this.x = this.x * value;
        this.y = this.y * value;
        this.z = this.z * value;
    }

    /**
     * Returns a unit-length copy (x/y/z normalized, w preserved).
     * A zero-length vector yields the zero vector (with w = 0) rather than NaNs.
     */
    public Vector4 normalize()
    {
        final double len = Math.sqrt( x*x + y*y + z*z );
        if ( len == 0 )
        {
            return new Vector4();
        }
        return new Vector4( x / len , y / len , z / len , w );
    }

    /** Normalizes x/y/z in place; no-op for zero-length or already-unit vectors. */
    public void normalizeInPlace()
    {
        final double len = Math.sqrt( x*x + y*y + z*z );
        if ( len != 0 && len != 1 )
        {
            this.x = this.x / len;
            this.y = this.y / len;
            this.z = this.z / len;
        }
    }

    /** Homogeneous divide: returns a copy with w == 1, or this instance if w is already 1. */
    public Vector4 normalizeW()
    {
        if ( w != 1.0 )
        {
            return new Vector4( x / w, y / w , z / w , 1 );
        }
        return this;
    }

    /**
     * In-place homogeneous divide of x/y/z by w.
     * NOTE: w itself is intentionally left unchanged here (unlike {@link #normalizeW()}).
     */
    public void normalizeWInPlace()
    {
        if ( w != 1.0 )
        {
            x = x / w;
            y = y / w;
            z = z / w;
        }
    }

    /** Scalar / dot product over x/y/z only (w is ignored). */
    public double dotProduct(Vector4 o)
    {
        return x*o.x + y*o.y + z*o.z;
    }

    /** Component-wise (Hadamard) product over all four components. */
    public Vector4 straightMultiply(Vector4 o)
    {
        return new Vector4( this.x * o.x , this.y * o.y , this.z * o.z, this.w * o.w );
    }

    /** Euclidean length of the x/y/z part. */
    public double length()
    {
        return Math.sqrt( x*x + y*y + z*z );
    }

    /** Squared Euclidean length of the x/y/z part. */
    public double lengthSquared()
    {
        return x*x + y*y + z*z;
    }

    /**
     * NOTE: despite the name this returns the SQUARED length (same as
     * {@link #lengthSquared()}), not the length — kept for caller compatibility.
     */
    public double magnitude()
    {
        return x*x + y*y + z*z;
    }

    /** Angle between this vector and {@code o} in radians (via the dot product / cosine). */
    public double angleInRadians(Vector4 o)
    {
        // dot(a,b) = |a| * |b| * cos(angle)  =>  cos(angle) = dot / (|a|*|b|)
        final double cosine = dotProduct( o ) / ( length() * o.length() );
        return Math.acos( cosine );
    }

    /** Angle between this vector and {@code o} in degrees. */
    public double angleInDegrees(Vector4 o)
    {
        final double factor = (180.0d / Math.PI);
        return angleInRadians(o) * factor;
    }

    /** Cross product of the x/y/z parts; the result's w defaults to 1. */
    public Vector4 crossProduct(Vector4 o)
    {
        double newX = y * o.z - o.y * z;
        double newY = z * o.x - o.z * x;
        double newZ = x * o.y - o.x * y;
        return new Vector4( newX, newY, newZ );
    }

    @Override
    public String toString()
    {
        return "(" + format( x() ) + "," + format( y() ) + "," + format( z() ) + "," + format( w() ) + ")";
    }

    // DecimalFormat is not thread-safe, so a fresh instance is created per call.
    private static String format(double d)
    {
        return new DecimalFormat("##0.0###").format( d );
    }

    /** Clamps x/y/z into [min,max]; the result's w defaults to 1. */
    public Vector4 clamp(double min, double max)
    {
        double newX = x;
        double newY = y;
        double newZ = z;

        if ( newX < min ) {
            newX = min;
        } else if ( newX > max ) {
            newX = max;
        }

        if ( newY < min ) {
            newY = min;
        } else if ( newY > max ) {
            newY = max;
        }

        if ( newZ < min ) {
            newZ = min;
        } else if ( newZ > max ) {
            newZ = max;
        }
        return new Vector4(newX, newY, newZ);
    }

    /** Rescales x/y/z in place so the length does not exceed {@code magnitude}. */
    public void clampMagnitudeInPlace(double magnitude)
    {
        final double len = length();
        if ( len <= magnitude )
        {
            return;
        }
        final double factor = magnitude / len;
        this.x *= factor;
        this.y *= factor;
        this.z *= factor;
    }

    /** Copies all four components from {@code p}. */
    public void set(Vector4 p)
    {
        this.x = p.x;
        this.y = p.y;
        this.z = p.z;
        this.w = p.w;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.wicket.extensions.ajax.markup.html;

import java.io.Serializable;

import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.RequestCycle;
import org.apache.wicket.ajax.AbstractDefaultAjaxBehavior;
import org.apache.wicket.ajax.AjaxEventBehavior;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.markup.ComponentTag;
import org.apache.wicket.markup.MarkupStream;
import org.apache.wicket.markup.html.WebComponent;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.FormComponent;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.apache.wicket.util.convert.IConverter;
import org.apache.wicket.util.string.JavascriptUtils;
import org.apache.wicket.validation.IValidator;

/**
 * An implementation of ajaxified edit-in-place component using a {@link TextField} as it's editor.
 * <p>
 * There are several methods that can be overridden for customization.
 * <ul>
 * <li>{@link #onEdit(AjaxRequestTarget)} is called when the label is clicked and the editor is to
 * be displayed. The default implementation switches the label for the editor and places the caret
 * at the end of the text.</li>
 * <li>{@link #onSubmit(AjaxRequestTarget)} is called when in edit mode, the user submitted new
 * content, that content validated well, and the model value successfully updated. This
 * implementation also clears any <code>window.status</code> set.</li>
 * <li>{@link #onError(AjaxRequestTarget)} is called when in edit mode, the user submitted new
 * content, but that content did not validate. Get the current input by calling
 * {@link FormComponent#getInput()} on {@link #getEditor()}, and the error message by calling:
 *
 * <pre>
 * String errorMessage = editor.getFeedbackMessage().getMessage();
 * </pre>
 *
 * The default implementation of this method displays the error message in
 * <code>window.status</code>, redisplays the editor, selects the editor's content and sets the
 * focus on it.
 * <li>{@link #onCancel(AjaxRequestTarget)} is called when in edit mode, the user choose not to
 * submit the contents (he/she pressed escape). The default implementation displays the label again
 * without any further action.</li>
 * </ul>
 * </p>
 *
 * @author Igor Vaynberg (ivaynberg)
 * @author Eelco Hillenius
 * @param <T>
 */
// TODO wonder if it makes sense to refactor this into a formcomponentpanel
public class AjaxEditableLabel<T> extends Panel
{
	private static final long serialVersionUID = 1L;

	/** editor component; lazily created, see initLabelAndEditor(). */
	private FormComponent<T> editor;

	/** label component; lazily created, see initLabelAndEditor(). */
	private WebComponent label;

	/**
	 * Ajax behavior attached to the editor: wires keyboard/blur events to save/cancel
	 * callbacks and handles the resulting ajax request.
	 */
	protected class EditorAjaxBehavior extends AbstractDefaultAjaxBehavior
	{
		private static final long serialVersionUID = 1L;

		/**
		 * Constructor.
		 */
		public EditorAjaxBehavior()
		{
		}

		@Override
		protected void onComponentTag(ComponentTag tag)
		{
			super.onComponentTag(tag);
			// JS snippet that submits the editor's current value with save=true
			final String saveCall = "{" +
				generateCallbackScript("wicketAjaxGet('" + getCallbackUrl() +
					"&save=true&'+this.name+'='+wicketEncode(this.value)") + "; return false;}";

			// JS snippet that cancels editing (save=false)
			final String cancelCall = "{" +
				generateCallbackScript("wicketAjaxGet('" + getCallbackUrl() + "&save=false'") +
				"; return false;}";

			// kc==27 is escape -> cancel; kc==13 is enter -> save; anything else falls through
			final String keypress = "var kc=wicketKeyCode(event); if (kc==27) " + cancelCall +
				" else if (kc!=13) { return true; } else " + saveCall;

			tag.put("onblur", saveCall);
			// Safari delivers key events differently, so the handler is attached to
			// onkeydown there and onkeypress everywhere else.
			tag.put("onkeypress", "if (Wicket.Browser.isSafari()) { return; }; " + keypress);
			tag.put("onkeydown", "if (!Wicket.Browser.isSafari()) { return; }; " + keypress);
		}

		@Override
		protected void respond(AjaxRequestTarget target)
		{
			RequestCycle requestCycle = RequestCycle.get();
			// "save" request parameter distinguishes a submit from a cancel
			boolean save = Boolean.valueOf(requestCycle.getRequest().getParameter("save"))
				.booleanValue();

			if (save)
			{
				editor.processInput();

				if (editor.isValid())
				{
					onSubmit(target);
				}
				else
				{
					onError(target);
				}
			}
			else
			{
				onCancel(target);
			}
		}
	}

	/**
	 * Ajax behavior attached to the label: switches the component into edit mode on the
	 * configured event (by default "onclick", see getLabelAjaxEvent()).
	 */
	protected class LabelAjaxBehavior extends AjaxEventBehavior
	{
		private static final long serialVersionUID = 1L;

		/**
		 * Construct.
		 *
		 * @param event
		 */
		public LabelAjaxBehavior(String event)
		{
			super(event);
		}

		@Override
		protected void onEvent(AjaxRequestTarget target)
		{
			onEdit(target);
		}
	}

	/**
	 * Constructor
	 *
	 * @param id
	 */
	public AjaxEditableLabel(String id)
	{
		super(id);
		setOutputMarkupId(true);
	}

	/**
	 * Constructor
	 *
	 * @param id
	 * @param model
	 */
	public AjaxEditableLabel(String id, IModel<T> model)
	{
		super(id, model);
		setOutputMarkupId(true);
	}

	/**
	 * Adds a validator to this form component. A model must be available for this component before
	 * Validators can be added. Either add this Component to its parent (already having a Model), or
	 * provide one before this call via constructor {@link #AjaxEditableLabel(String,IModel)} or
	 * {@link #setDefaultModel(IModel)}.
	 *
	 * @param validator
	 *            The validator
	 * @return This
	 */
	public final AjaxEditableLabel add(IValidator validator)
	{
		getEditor().add(validator);
		return this;
	}

	/**
	 * Implementation that returns null by default (panels don't typically need converters anyway).
	 * This is used by the embedded default instances of label and form field to determine whether
	 * they should use a converter like they normally would (when this method returns null), or
	 * whether they should use a custom converter (when this method is overridden and returns not
	 * null).
	 *
	 * @see org.apache.wicket.Component#getConverter(java.lang.Class)
	 */
	@Override
	public IConverter getConverter(Class<?> type)
	{
		return null;
	}

	/**
	 * The value will be made available to the validator property by means of ${label}. It does not
	 * have any specific meaning to FormComponent itself.
	 *
	 * @param labelModel
	 * @return this for chaining
	 */
	public final AjaxEditableLabel setLabel(final IModel<String> labelModel)
	{
		getEditor().setLabel(labelModel);
		return this;
	}

	/**
	 * @see org.apache.wicket.MarkupContainer#setDefaultModel(org.apache.wicket.model.IModel)
	 */
	@Override
	public final AjaxEditableLabel setDefaultModel(IModel<?> model)
	{
		super.setDefaultModel(model);
		// keep both embedded components in sync with the panel's model
		getLabel().setDefaultModel(model);
		getEditor().setDefaultModel(model);
		return this;
	}

	/**
	 * Sets the required flag
	 *
	 * @param required
	 * @return this for chaining
	 */
	public final AjaxEditableLabel setRequired(final boolean required)
	{
		getEditor().setRequired(required);
		return this;
	}

	/**
	 * Sets the type that will be used when updating the model for this component. If no type is
	 * specified String type is assumed.
	 *
	 * @param type
	 * @return this for chaining
	 */
	public final AjaxEditableLabel setType(Class<?> type)
	{
		getEditor().setType(type);
		return this;
	}

	/**
	 * Create a new form component instance to serve as editor.
	 *
	 * @param parent
	 *            The parent component
	 * @param componentId
	 *            Id that should be used by the component
	 * @param model
	 *            The model
	 * @return The editor
	 */
	protected FormComponent<T> newEditor(MarkupContainer parent, String componentId, IModel<T> model)
	{
		TextField<T> editor = new TextField<T>(componentId, model)
		{
			private static final long serialVersionUID = 1L;

			@Override
			public IConverter getConverter(Class<?> type)
			{
				// prefer a converter supplied by the enclosing panel, if any
				IConverter c = AjaxEditableLabel.this.getConverter(type);
				return c != null ? c : super.getConverter(type);
			}

			@Override
			protected void onModelChanged()
			{
				super.onModelChanged();
				AjaxEditableLabel.this.onModelChanged();
			}

			@Override
			protected void onModelChanging()
			{
				super.onModelChanging();
				AjaxEditableLabel.this.onModelChanging();
			}
		};
		editor.setOutputMarkupId(true);
		// hidden until the user clicks the label (see onEdit)
		editor.setVisible(false);
		editor.add(new EditorAjaxBehavior());
		return editor;
	}

	/**
	 * Create a new form component instance to serve as label.
	 *
	 * @param parent
	 *            The parent component
	 * @param componentId
	 *            Id that should be used by the component
	 * @param model
	 *            The model
	 * @return The editor
	 */
	protected WebComponent newLabel(MarkupContainer parent, String componentId, IModel<T> model)
	{
		Label label = new Label(componentId, model)
		{
			private static final long serialVersionUID = 1L;

			@Override
			public IConverter getConverter(Class<?> type)
			{
				// prefer a converter supplied by the enclosing panel, if any
				IConverter c = AjaxEditableLabel.this.getConverter(type);
				return c != null ? c : super.getConverter(type);
			}

			@Override
			protected void onComponentTagBody(MarkupStream markupStream, ComponentTag openTag)
			{
				Object modelObject = getDefaultModelObject();
				if (modelObject == null || "".equals(modelObject))
				{
					// render a placeholder so there is something to click on
					replaceComponentTagBody(markupStream, openTag, defaultNullLabel());
				}
				else
				{
					super.onComponentTagBody(markupStream, openTag);
				}
			}
		};
		label.setOutputMarkupId(true);
		label.add(new LabelAjaxBehavior(getLabelAjaxEvent()));
		return label;
	}

	/**
	 * By default this returns "onclick" uses can overwrite this on which event the label behavior
	 * should be triggered
	 *
	 * @return The event name
	 */
	protected String getLabelAjaxEvent()
	{
		return "onclick";
	}

	/**
	 * Gets the editor component.
	 *
	 * @return The editor component
	 */
	protected final FormComponent<T> getEditor()
	{
		if (editor == null)
		{
			initLabelAndEditor(getParentModel());
		}
		return editor;
	}

	/**
	 * Gets the label component.
	 *
	 * @return The label component
	 */
	protected final WebComponent getLabel()
	{
		if (label == null)
		{
			initLabelAndEditor(getParentModel());
		}
		return label;
	}

	/**
	 * @see org.apache.wicket.Component#onBeforeRender()
	 */
	@Override
	protected void onBeforeRender()
	{
		super.onBeforeRender();
		// lazily add label and editor
		if (editor == null)
		{
			initLabelAndEditor(getParentModel());
		}
		label.setEnabled(isEnableAllowed() && isEnabled());
	}

	/**
	 * Invoked when the label is in edit mode, and received a cancel event. Typically, nothing
	 * should be done here.
	 *
	 * @param target
	 *            the ajax request target
	 */
	protected void onCancel(AjaxRequestTarget target)
	{
		label.setVisible(true);
		editor.setVisible(false);
		target.addComponent(AjaxEditableLabel.this);
	}

	/**
	 * Called when the label is clicked and the component is put in edit mode.
	 *
	 * @param target
	 *            Ajax target
	 */
	protected void onEdit(AjaxRequestTarget target)
	{
		label.setVisible(false);
		editor.setVisible(true);
		target.addComponent(AjaxEditableLabel.this);
		// put focus on the textfield and stupid explorer hack to move the
		// caret to the end
		target.appendJavascript("{ var el=wicketGet('" + editor.getMarkupId() + "');" +
			"   if (el.createTextRange) { " +
			"     var v = el.value; var r = el.createTextRange(); " +
			"     r.moveStart('character', v.length); r.select(); } }");
		target.focusComponent(editor);
	}

	/**
	 * Invoked when the label is in edit mode, received a new input, but that input didn't validate
	 *
	 * @param target
	 *            the ajax request target
	 */
	protected void onError(AjaxRequestTarget target)
	{
		Serializable errorMessage = editor.getFeedbackMessage().getMessage();
		if (errorMessage instanceof String)
		{
			// show the validation message in the browser status bar
			target.appendJavascript("window.status='" +
				JavascriptUtils.escapeQuotes((String)errorMessage) + "';");
		}
		String editorMarkupId = editor.getMarkupId();
		target.appendJavascript(editorMarkupId + ".select();");
		target.appendJavascript(editorMarkupId + ".focus();");
		target.addComponent(editor);
	}

	/**
	 * Invoked when the editor was successfully updated. Use this method e.g. to persist the changed
	 * value. This implementation displays the label and clears any window status that might have
	 * been set in onError.
	 *
	 * @param target
	 *            The ajax request target
	 */
	protected void onSubmit(AjaxRequestTarget target)
	{
		label.setVisible(true);
		editor.setVisible(false);
		target.addComponent(AjaxEditableLabel.this);

		// clear any error message left behind by onError
		target.appendJavascript("window.status='';");
	}

	/**
	 * Lazy initialization of the label and editor components and set tempModel to null.
	 *
	 * @param model
	 *            The model for the label and editor
	 */
	private void initLabelAndEditor(IModel<T> model)
	{
		editor = newEditor(this, "editor", model);
		label = newLabel(this, "label", model);
		add(label);
		add(editor);
	}

	/**
	 * @return Gets the parent model in case no explicit model was specified.
	 */
	@SuppressWarnings("unchecked")
	private IModel<T> getParentModel()
	{
		// the #getModel() call below will resolve and assign any inheritable
		// model this component can use. Set that directly to the label and
		// editor so that those components work like this enclosing panel
		// does not exist (must have that e.g. with CompoundPropertyModels)
		IModel<T> m = (IModel<T>)getDefaultModel();

		// check that a model was found
		if (m == null)
		{
			Component parent = getParent();
			String msg = "No model found for this component, either pass one explicitly or " +
				"make sure an inheritable model is available.";
			if (parent == null)
			{
				msg += " This component is not added to a parent yet, so if this component " +
					"is supposed to use the model of the parent (e.g. when it uses a " +
					"compound property model), add it first before further configuring " +
					"the component calling methods like e.g. setType and addValidator.";
			}
			throw new IllegalStateException(msg);
		}
		return m;
	}

	/**
	 * Override this to display a different value when the model object is null. Default is
	 * <code>...</code>
	 *
	 * @return The string which should be displayed when the model object is null.
	 */
	protected String defaultNullLabel()
	{
		return "...";
	}

	/**
	 * Dummy override to fix WICKET-1239
	 *
	 * @see org.apache.wicket.Component#onModelChanged()
	 */
	@Override
	protected void onModelChanged()
	{
		super.onModelChanged();
	}

	/**
	 * Dummy override to fix WICKET-1239
	 *
	 * @see org.apache.wicket.Component#onModelChanging()
	 */
	@Override
	protected void onModelChanging()
	{
		super.onModelChanging();
	}
}
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.cluster.coordination; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.coordination.Coordinator.Mode; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.RerouteService; import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.env.Environment; import org.elasticsearch.monitor.NodeHealthService; import org.elasticsearch.monitor.StatusInfo; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import 
org.elasticsearch.transport.TransportResponse; import org.elasticsearch.transport.TransportResponse.Empty; import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Supplier; import static org.elasticsearch.monitor.StatusInfo.Status.UNHEALTHY; public class JoinHelper { private static final Logger logger = LogManager.getLogger(JoinHelper.class); public static final String START_JOIN_ACTION_NAME = "internal:cluster/coordination/start_join"; public static final String JOIN_ACTION_NAME = "internal:cluster/coordination/join"; public static final String JOIN_VALIDATE_ACTION_NAME = "internal:cluster/coordination/join/validate"; public static final String JOIN_PING_ACTION_NAME = "internal:cluster/coordination/join/ping"; private final MasterService masterService; private final TransportService transportService; private volatile JoinTaskExecutor joinTaskExecutor; private final NodeHealthService nodeHealthService; private final JoinReasonService joinReasonService; private final Set<Tuple<DiscoveryNode, JoinRequest>> pendingOutgoingJoins = Collections.synchronizedSet(new HashSet<>()); private final AtomicReference<FailedJoinAttempt> lastFailedJoinAttempt = new AtomicReference<>(); private final Map<DiscoveryNode, Releasable> joinConnections = new HashMap<>(); // synchronized on itself private final Supplier<JoinTaskExecutor> joinTaskExecutorGenerator; JoinHelper( Settings settings, AllocationService allocationService, MasterService masterService, TransportService 
transportService, LongSupplier currentTermSupplier, Supplier<ClusterState> currentStateSupplier, BiConsumer<JoinRequest, ActionListener<Void>> joinHandler, Function<StartJoinRequest, Join> joinLeaderInTerm, Collection<BiConsumer<DiscoveryNode, ClusterState>> joinValidators, RerouteService rerouteService, NodeHealthService nodeHealthService, JoinReasonService joinReasonService ) { this.masterService = masterService; this.transportService = transportService; this.nodeHealthService = nodeHealthService; this.joinReasonService = joinReasonService; this.joinTaskExecutorGenerator = () -> new JoinTaskExecutor(allocationService, logger, rerouteService) { private final long term = currentTermSupplier.getAsLong(); @Override public ClusterTasksResult<JoinTaskExecutor.Task> execute(ClusterState currentState, List<JoinTaskExecutor.Task> joiningTasks) throws Exception { // The current state that MasterService uses might have been updated by a (different) master in a higher term already // Stop processing the current cluster state update, as there's no point in continuing to compute it as // it will later be rejected by Coordinator.publish(...) 
// NOTE(review): this chunk starts inside JoinHelper's constructor, within the execute(...)
// override of an anonymous join-task executor; the method header lies before this view.
// The stray word "anyhow" was the tail of a truncated comment, restored here:
// ... it will later be rejected by Coordinator.publish(...) anyhow
if (currentState.term() > term) {
    // A master with a higher term already exists; stop processing this join batch.
    logger.trace("encountered higher term {} than current {}, there is a newer master", currentState.term(), term);
    throw new NotMasterException(
        "Higher term encountered (current: " + currentState.term() + " > used: " + term + "), there is a newer master"
    );
} else if (currentState.nodes().getMasterNodeId() == null && joiningTasks.stream().anyMatch(Task::isBecomeMasterTask)) {
    // We are becoming master: record the new (higher) term in the coordination metadata.
    assert currentState.term() < term : "there should be at most one become master task per election (= by term)";
    final CoordinationMetadata coordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata())
        .term(term)
        .build();
    final Metadata metadata = Metadata.builder(currentState.metadata()).coordinationMetadata(coordinationMetadata).build();
    currentState = ClusterState.builder(currentState).metadata(metadata).build();
} else if (currentState.nodes().isLocalNodeElectedMaster()) {
    // Already elected master: the term must not have changed under us.
    assert currentState.term() == term : "term should be stable for the same master";
}
return super.execute(currentState, joiningTasks);
}
};

// Handles incoming join requests from nodes that want to join this master.
transportService.registerRequestHandler(
    JOIN_ACTION_NAME,
    ThreadPool.Names.GENERIC,
    false,
    false,
    JoinRequest::new,
    (request, channel, task) -> joinHandler.accept(
        request,
        new ChannelActionListener<Empty, JoinRequest>(channel, JOIN_ACTION_NAME, request).map(ignored -> Empty.INSTANCE)
    )
);

// Handles a start-join request from a would-be master: reply by sending it our join
// (with the vote obtained via joinLeaderInTerm) at the current term.
transportService.registerRequestHandler(
    START_JOIN_ACTION_NAME,
    Names.GENERIC,
    false,
    false,
    StartJoinRequest::new,
    (request, channel, task) -> {
        final DiscoveryNode destination = request.getSourceNode();
        sendJoinRequest(destination, currentTermSupplier.getAsLong(), Optional.of(joinLeaderInTerm.apply(request)));
        channel.sendResponse(Empty.INSTANCE);
    }
);

// Lightweight liveness ping used during joining; answered inline on the calling thread.
transportService.registerRequestHandler(
    JOIN_PING_ACTION_NAME,
    ThreadPool.Names.SAME,
    false,
    false,
    TransportRequest.Empty::new,
    (request, channel, task) -> channel.sendResponse(Empty.INSTANCE)
);

// Data paths are only used to build a helpful error message in the validate handler below.
final List<String> dataPaths = Environment.PATH_DATA_SETTING.get(settings);

// Validates a cluster state offered by a master before we join it: rejects joining a
// cluster whose UUID differs from the one this node previously committed to, then runs
// any additional pluggable join validators.
transportService.registerRequestHandler(
    JOIN_VALIDATE_ACTION_NAME,
    ThreadPool.Names.GENERIC,
    ValidateJoinRequest::new,
    (request, channel, task) -> {
        final ClusterState localState = currentStateSupplier.get();
        if (localState.metadata().clusterUUIDCommitted()
            && localState.metadata().clusterUUID().equals(request.getState().metadata().clusterUUID()) == false) {
            throw new CoordinationStateRejectedException(
                "This node previously joined a cluster with UUID ["
                    + localState.metadata().clusterUUID()
                    + "] and is now trying to join a different cluster with UUID ["
                    + request.getState().metadata().clusterUUID()
                    + "]. This is forbidden and usually indicates an incorrect "
                    + "discovery or cluster bootstrapping configuration. Note that the cluster UUID persists across restarts and "
                    + "can only be changed by deleting the contents of the node's data "
                    + (dataPaths.size() == 1 ? "path " : "paths ")
                    + dataPaths
                    + " which will also remove any data held by this node."
            );
        }
        joinValidators.forEach(action -> action.accept(transportService.getLocalNode(), request.getState()));
        channel.sendResponse(Empty.INSTANCE);
    }
);
}

// True while at least one outgoing join request is in flight.
boolean isJoinPending() {
    return pendingOutgoingJoins.isEmpty() == false;
}

public void onClusterStateApplied() {
    // we applied a cluster state as LEADER or FOLLOWER which means the NodeConnectionsService has taken ownership of any connections to
    // nodes in the cluster and therefore we can release the connection(s) that we were using for joining
    final List<Releasable> releasables;
    synchronized (joinConnections) {
        if (joinConnections.isEmpty()) {
            return;
        }
        // Snapshot and clear under the lock; close outside it to avoid holding the lock during close.
        releasables = new ArrayList<>(joinConnections.values());
        joinConnections.clear();
    }
    logger.debug("releasing [{}] connections on successful cluster state application", releasables.size());
    releasables.forEach(Releasables::close);
}

// Records the connection reference acquired for joining `destination`; any previously
// registered reference for the same node is released (outside the lock).
private void registerConnection(DiscoveryNode destination, Releasable connectionReference) {
    final Releasable previousConnection;
    synchronized (joinConnections) {
        previousConnection = joinConnections.put(destination, connectionReference);
    }
    Releasables.close(previousConnection);
}

// Removes the mapping only if it still refers to this exact reference, then releases it.
private void unregisterAndReleaseConnection(DiscoveryNode destination, Releasable connectionReference) {
    synchronized (joinConnections) {
        joinConnections.remove(destination, connectionReference);
    }
    Releasables.close(connectionReference);
}

// package-private for testing
// Immutable record of a failed join attempt, capturing the failure time (System.nanoTime)
// so it can later be logged with an age.
static class FailedJoinAttempt {
    private final DiscoveryNode destination;
    private final JoinRequest joinRequest;
    private final TransportException exception;
    private final long timestamp; // nanoTime at construction

    FailedJoinAttempt(DiscoveryNode destination, JoinRequest joinRequest, TransportException exception) {
        this.destination = destination;
        this.joinRequest = joinRequest;
        this.exception = exception;
        this.timestamp = System.nanoTime();
    }

    // Logs the failure immediately at a level chosen from the failure cause.
    void logNow() {
        logger.log(
            getLogLevel(exception),
            () -> new ParameterizedMessage("failed to join {} with {}", destination, joinRequest),
            exception
        );
    }

    // Expected/benign rejections are DEBUG; anything else is INFO.
    static Level getLogLevel(TransportException e) {
        Throwable cause = e.unwrapCause();
        if (cause instanceof CoordinationStateRejectedException
            || cause instanceof FailedToCommitClusterStateException
            || cause instanceof NotMasterException) {
            return Level.DEBUG;
        }
        return Level.INFO;
    }

    // Logs the failure at WARN together with how long ago it happened.
    void logWarnWithTimestamp() {
        logger.warn(
            () -> new ParameterizedMessage(
                "last failed join attempt was {} ago, failed to join {} with {}",
                TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - timestamp)),
                destination,
                joinRequest
            ),
            exception
        );
    }
}

// Logs (at most once) the most recent failed join attempt, then clears it if unchanged.
void logLastFailedJoinAttempt() {
    FailedJoinAttempt attempt = lastFailedJoinAttempt.get();
    if (attempt != null) {
        attempt.logWarnWithTimestamp();
        lastFailedJoinAttempt.compareAndSet(attempt, null);
    }
}

/**
 * Sends a join request to {@code destination} at the given term, optionally carrying a vote.
 * Skipped when the local node is unhealthy; duplicate in-flight (destination, request)
 * pairs are suppressed via {@code pendingOutgoingJoins}.
 */
public void sendJoinRequest(DiscoveryNode destination, long term, Optional<Join> optionalJoin) {
    assert destination.isMasterNode() : "trying to join master-ineligible " + destination;
    final StatusInfo statusInfo = nodeHealthService.getHealth();
    if (statusInfo.getStatus() == UNHEALTHY) {
        logger.debug("dropping join request to [{}]: [{}]", destination, statusInfo.getInfo());
        return;
    }
    final JoinRequest joinRequest = new JoinRequest(transportService.getLocalNode(), term, optionalJoin);
    final Tuple<DiscoveryNode, JoinRequest> dedupKey = Tuple.tuple(destination, joinRequest);
    if (pendingOutgoingJoins.add(dedupKey)) {
        logger.debug("attempting to join {} with {}", destination, joinRequest);
        // Typically we're already connected to the destination at this point, the PeerFinder holds a reference to this connection to
        // keep it open, but we need to acquire our own reference to keep the connection alive through the joining process.
        transportService.connectToNode(destination, new ActionListener<Releasable>() {
            @Override
            public void onResponse(Releasable connectionReference) {
                logger.trace("acquired connection for joining join {} with {}", destination, joinRequest);

                // Register the connection in joinConnections so it can be released once we successfully apply the cluster state, at
                // which point the NodeConnectionsService will have taken ownership of it.
                registerConnection(destination, connectionReference);

                transportService.sendRequest(
                    destination,
                    JOIN_ACTION_NAME,
                    joinRequest,
                    TransportRequestOptions.of(null, TransportRequestOptions.Type.PING),
                    new TransportResponseHandler.Empty() {
                        @Override
                        public void handleResponse(TransportResponse.Empty response) {
                            pendingOutgoingJoins.remove(dedupKey);
                            logger.debug("successfully joined {} with {}", destination, joinRequest);
                            lastFailedJoinAttempt.set(null);
                        }

                        @Override
                        public void handleException(TransportException exp) {
                            pendingOutgoingJoins.remove(dedupKey);
                            FailedJoinAttempt attempt = new FailedJoinAttempt(destination, joinRequest, exp);
                            attempt.logNow();
                            lastFailedJoinAttempt.set(attempt);
                            // The join failed, so the connection will not be adopted; release it now.
                            unregisterAndReleaseConnection(destination, connectionReference);
                        }
                    }
                );
            }

            @Override
            public void onFailure(Exception e) {
                // Could not even acquire a connection; record as a failed attempt.
                pendingOutgoingJoins.remove(dedupKey);
                FailedJoinAttempt attempt = new FailedJoinAttempt(
                    destination,
                    joinRequest,
                    new ConnectTransportException(destination, "failed to acquire connection", e)
                );
                attempt.logNow();
                lastFailedJoinAttempt.set(attempt);
            }
        });
    } else {
        logger.debug("already attempting to join {} with request {}, not sending request", destination, joinRequest);
    }
}

// Fire-and-forget start-join to `destination`; success/failure is only logged.
void sendStartJoinRequest(final StartJoinRequest startJoinRequest, final DiscoveryNode destination) {
    assert startJoinRequest.getSourceNode().isMasterNode()
        : "sending start-join request for master-ineligible " + startJoinRequest.getSourceNode();
    transportService.sendRequest(destination, START_JOIN_ACTION_NAME, startJoinRequest, new TransportResponseHandler.Empty() {
        @Override
        public void handleResponse(TransportResponse.Empty response) {
            logger.debug("successful response to {} from {}", startJoinRequest, destination);
        }

        @Override
        public void handleException(TransportException exp) {
            logger.debug(new ParameterizedMessage("failure in response to {} from {}", startJoinRequest, destination), exp);
        }
    });
}

// Bridges a cluster-state task outcome for a single join task back to the join's listener.
static class JoinTaskListener implements ClusterStateTaskListener {
    private final JoinTaskExecutor.Task task;
    private final ActionListener<Void> joinListener;

    JoinTaskListener(JoinTaskExecutor.Task task, ActionListener<Void> joinListener) {
        this.task = task;
        this.joinListener = joinListener;
    }

    @Override
    public void onFailure(String source, Exception e) {
        joinListener.onFailure(e);
    }

    @Override
    public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) {
        joinListener.onResponse(null);
    }

    @Override
    public String toString() {
        return "JoinTaskListener{task=" + task + "}";
    }
}

// Strategy for what to do with incoming joins; behavior depends on the node's current Mode.
interface JoinAccumulator {
    void handleJoinRequest(DiscoveryNode sender, ActionListener<Void> joinListener);

    default void close(Mode newMode) {}
}

// As an established LEADER: each join is submitted directly as an URGENT cluster-state task.
class LeaderJoinAccumulator implements JoinAccumulator {
    @Override
    public void handleJoinRequest(DiscoveryNode sender, ActionListener<Void> joinListener) {
        final JoinTaskExecutor.Task task = new JoinTaskExecutor.Task(sender, joinReasonService.getJoinReason(sender, Mode.LEADER));
        assert joinTaskExecutor != null;
        masterService.submitStateUpdateTask(
            "node-join",
            task,
            ClusterStateTaskConfig.build(Priority.URGENT),
            joinTaskExecutor,
            new JoinTaskListener(task, joinListener)
        );
    }

    @Override
    public String toString() {
        return "LeaderJoinAccumulator";
    }
}

// Before initialisation has completed: joins are unexpected and rejected.
static class InitialJoinAccumulator implements JoinAccumulator {
    @Override
    public void handleJoinRequest(DiscoveryNode sender, ActionListener<Void> joinListener) {
        assert false : "unexpected join from " + sender + " during initialisation";
        joinListener.onFailure(new CoordinationStateRejectedException("join target is not initialised yet"));
    }

    @Override
    public String toString() {
        return "InitialJoinAccumulator";
    }
}

// As a FOLLOWER: joins must go to the leader, so they are rejected.
static class FollowerJoinAccumulator implements JoinAccumulator {
    @Override
    public void handleJoinRequest(DiscoveryNode sender, ActionListener<Void> joinListener) {
        joinListener.onFailure(new CoordinationStateRejectedException("join target is a follower"));
    }

    @Override
    public String toString() {
        return "FollowerJoinAccumulator";
    }
}

// As a CANDIDATE: joins are buffered (latest per sender wins) until the election resolves,
// then either submitted as one batched become-master update or all failed.
class CandidateJoinAccumulator implements JoinAccumulator {

    private final Map<DiscoveryNode, ActionListener<Void>> joinRequestAccumulator = new HashMap<>();
    boolean closed;

    @Override
    public void handleJoinRequest(DiscoveryNode sender, ActionListener<Void> joinListener) {
        assert closed == false : "CandidateJoinAccumulator closed";
        // Only the newest join per sender is kept; the superseded one is failed.
        ActionListener<Void> prev = joinRequestAccumulator.put(sender, joinListener);
        if (prev != null) {
            prev.onFailure(new CoordinationStateRejectedException("received a newer join from " + sender));
        }
    }

    @Override
    public void close(Mode newMode) {
        assert closed == false : "CandidateJoinAccumulator closed";
        closed = true;
        if (newMode == Mode.LEADER) {
            // Won the election: convert all buffered joins into tasks and submit them together
            // with the become-master and finish-election marker tasks.
            final Map<JoinTaskExecutor.Task, ClusterStateTaskListener> pendingAsTasks = new LinkedHashMap<>();
            joinRequestAccumulator.forEach((node, listener) -> {
                final JoinTaskExecutor.Task task = new JoinTaskExecutor.Task(
                    node,
                    joinReasonService.getJoinReason(node, Mode.CANDIDATE)
                );
                pendingAsTasks.put(task, new JoinTaskListener(task, listener));
            });

            final String stateUpdateSource = "elected-as-master ([" + pendingAsTasks.size() + "] nodes joined)";

            pendingAsTasks.put(JoinTaskExecutor.newBecomeMasterTask(), (source, e) -> {});
            pendingAsTasks.put(JoinTaskExecutor.newFinishElectionTask(), (source, e) -> {});
            joinTaskExecutor = joinTaskExecutorGenerator.get();
            masterService.submitStateUpdateTasks(
                stateUpdateSource,
                pendingAsTasks,
                ClusterStateTaskConfig.build(Priority.URGENT),
                joinTaskExecutor
            );
        } else {
            // Lost the election: fail every buffered join.
            assert newMode == Mode.FOLLOWER : newMode;
            joinTaskExecutor = null;
            joinRequestAccumulator.values()
                .forEach(joinCallback -> joinCallback.onFailure(new CoordinationStateRejectedException("became follower")));
        }

        // CandidateJoinAccumulator is only closed when becoming leader or follower, otherwise it accumulates all joins received
        // regardless of term.
    }

    @Override
    public String toString() {
        return "CandidateJoinAccumulator{" + joinRequestAccumulator.keySet() + ", closed=" + closed + '}';
    }
}
}
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.junit5;

import com.intellij.junit4.ExpectedPatterns;
import com.intellij.junit4.JUnit4TestListener;
import com.intellij.rt.execution.junit.ComparisonFailureData;
import com.intellij.rt.execution.junit.MapSerializerUtil;
import org.junit.platform.engine.TestExecutionResult;
import org.junit.platform.engine.TestSource;
import org.junit.platform.engine.reporting.ReportEntry;
import org.junit.platform.engine.support.descriptor.ClassSource;
import org.junit.platform.engine.support.descriptor.CompositeTestSource;
import org.junit.platform.engine.support.descriptor.FileSource;
import org.junit.platform.engine.support.descriptor.MethodSource;
import org.junit.platform.launcher.TestExecutionListener;
import org.junit.platform.launcher.TestIdentifier;
import org.junit.platform.launcher.TestPlan;
import org.opentest4j.AssertionFailedError;
import org.opentest4j.MultipleFailuresError;
import org.opentest4j.ValueWrapper;

import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.*;

/**
 * JUnit 5 platform listener that translates test-plan events into TeamCity service
 * messages ("##teamcity[...]") on the given print stream, which the IDE test runner parses.
 */
public class JUnit5TestExecutionListener implements TestExecutionListener {
  private static final String NO_LOCATION_HINT = "";
  private static final String NO_LOCATION_HINT_VALUE = "";
  // Stream the teamcity service messages are written to (System.out by default).
  private final PrintStream myPrintStream;
  private TestPlan myTestPlan;
  // Wall-clock start time of the currently running test (ms).
  private long myCurrentTestStart;
  // Number of tests finished under the current container; used to decide whether to
  // synthesize "ignored" events for descendants of a failed/aborted container.
  private int myFinishCount = 0;
  private String myRootName;
  private boolean mySuccessful = true;
  // Suffix appended to unique ids to disambiguate forked/repeated runs.
  private String myIdSuffix = "";
  private final Set<TestIdentifier> myActiveRoots = new HashSet<>();

  public JUnit5TestExecutionListener() {
    this(System.out);
  }

  public JUnit5TestExecutionListener(PrintStream printStream) {
    myPrintStream = printStream;
    myPrintStream.println("##teamcity[enteredTheMatrix]");
  }

  public boolean wasSuccessful() {
    return mySuccessful;
  }

  // When forked, make ids unique across processes by appending a timestamp (once).
  public void initializeIdSuffix(boolean forked) {
    if (forked && myIdSuffix.length() == 0) {
      myIdSuffix = String.valueOf(System.currentTimeMillis());
    }
  }

  public void initializeIdSuffix(int i) {
    myIdSuffix = i + "th";
  }

  @Override
  public void reportingEntryPublished(TestIdentifier testIdentifier, ReportEntry entry) {
    // Report entries are echoed as plain text: "timestamp = ..., key = value, ...".
    StringBuilder builder = new StringBuilder();
    builder.append("timestamp = ").append(entry.getTimestamp());
    entry.getKeyValuePairs().forEach((key, value) -> builder.append(", ").append(key).append(" = ").append(value));
    myPrintStream.println(builder.toString());
  }

  @Override
  public void testPlanExecutionStarted(TestPlan testPlan) {
    if (myRootName != null) {
      // Split "package.ClassName" into name + comment parts for the rootName message.
      int lastPointIdx = myRootName.lastIndexOf('.');
      String name = myRootName;
      String comment = null;
      if (lastPointIdx >= 0) {
        name = myRootName.substring(lastPointIdx + 1);
        comment = myRootName.substring(0, lastPointIdx);
      }

      myPrintStream.println("##teamcity[rootName name = \'" + escapeName(name) +
                            (comment != null ? ("\' comment = \'" + escapeName(comment)) : "") + "\'" +
                            " location = \'java:suite://" + escapeName(myRootName) +
                            "\']");
    }
  }

  @Override
  public void testPlanExecutionFinished(TestPlan testPlan) {
  }

  @Override
  public void executionSkipped(TestIdentifier testIdentifier, String reason) {
    // A skipped node is reported as started then finished with ABORTED status.
    executionStarted(testIdentifier);
    executionFinished(testIdentifier, TestExecutionResult.Status.ABORTED, null, reason);
  }

  @Override
  public void executionStarted(TestIdentifier testIdentifier) {
    if (testIdentifier.isTest()) {
      testStarted(testIdentifier);
      myCurrentTestStart = System.currentTimeMillis();
    }
    else if (hasNonTrivialParent(testIdentifier)) {
      myFinishCount = 0;
      myPrintStream.println("##teamcity[testSuiteStarted" + idAndName(testIdentifier) + getLocationHint(testIdentifier) + "]");
    }
  }

  @Override
  public void dynamicTestRegistered(TestIdentifier testIdentifier) {
    // Keep the test plan in sync so parent lookups work for dynamic tests.
    myTestPlan.add(testIdentifier);
  }

  @Override
  public void executionFinished(TestIdentifier testIdentifier, TestExecutionResult testExecutionResult) {
    final TestExecutionResult.Status status = testExecutionResult.getStatus();
    final Throwable throwableOptional = testExecutionResult.getThrowable().orElse(null);
    executionFinished(testIdentifier, status, throwableOptional, null);
    mySuccessful &= TestExecutionResult.Status.SUCCESSFUL == testExecutionResult.getStatus();
  }

  // Core finish handler shared by normal completion and skips.
  // For containers, a FAILED status is attributed to a synthetic "Class Configuration"
  // test, and (if no child has finished yet) all descendants are reported as ignored.
  private void executionFinished(TestIdentifier testIdentifier,
                                 TestExecutionResult.Status status,
                                 Throwable throwableOptional,
                                 String reason) {
    final String displayName = testIdentifier.getDisplayName();
    if (testIdentifier.isTest()) {
      final long duration = getDuration();
      if (status == TestExecutionResult.Status.FAILED) {
        testFailure(testIdentifier, MapSerializerUtil.TEST_FAILED, throwableOptional, duration, reason, true);
      }
      else if (status == TestExecutionResult.Status.ABORTED) {
        testFailure(testIdentifier, MapSerializerUtil.TEST_IGNORED, throwableOptional, duration, reason, true);
      }
      testFinished(testIdentifier, duration);
      myFinishCount++;
    }
    else if (hasNonTrivialParent(testIdentifier)) {
      String messageName = null;
      if (status == TestExecutionResult.Status.FAILED) {
        messageName = MapSerializerUtil.TEST_FAILED;
      }
      else if (status == TestExecutionResult.Status.ABORTED) {
        messageName = MapSerializerUtil.TEST_IGNORED;
      }
      if (messageName != null) {
        if (status == TestExecutionResult.Status.FAILED) {
          // Container-level failure: attribute it to a synthetic configuration node.
          String parentId = getParentId(testIdentifier);
          String nameAndId = " name=\'" + JUnit4TestListener.CLASS_CONFIGURATION +
                             "\' nodeId=\'" + escapeName(getId(testIdentifier)) +
                             "\' parentNodeId=\'" + parentId + "\' ";
          testFailure(JUnit4TestListener.CLASS_CONFIGURATION, getId(testIdentifier), parentId, messageName, throwableOptional, 0, reason, true);
          myPrintStream.println("\n##teamcity[testFinished" + nameAndId + "]");
        }

        final Set<TestIdentifier> descendants = myTestPlan != null ? myTestPlan.getDescendants(testIdentifier) : Collections.emptySet();
        if (!descendants.isEmpty() && myFinishCount == 0) {
          // Nothing under this container ran: mark every descendant as ignored so the
          // tree in the IDE is complete.
          for (TestIdentifier childIdentifier : descendants) {
            testStarted(childIdentifier);
            testFailure(childIdentifier, MapSerializerUtil.TEST_IGNORED,
                        status == TestExecutionResult.Status.ABORTED ? throwableOptional : null,
                        0, reason,
                        status == TestExecutionResult.Status.ABORTED);
            testFinished(childIdentifier, 0);
          }
          myFinishCount = 0;
        }
      }
      myPrintStream.println("##teamcity[testSuiteFinished " + idAndName(testIdentifier, displayName) + "]");
    }
  }

  // A node is reported as a suite only if it has a parent, or is one of several engine roots.
  private boolean hasNonTrivialParent(TestIdentifier testIdentifier) {
    return testIdentifier.getParentId().isPresent() || (myActiveRoots.size() > 1 && myActiveRoots.contains(testIdentifier));
  }

  protected long getDuration() {
    return System.currentTimeMillis() - myCurrentTestStart;
  }

  private void testStarted(TestIdentifier testIdentifier) {
    myPrintStream.println("\n##teamcity[testStarted" + idAndName(testIdentifier) + " " + getLocationHint(testIdentifier) + "]");
  }

  private void testFinished(TestIdentifier testIdentifier, long duration) {
    myPrintStream.println("\n##teamcity[testFinished" + idAndName(testIdentifier) +
                          (duration > 0 ? " duration=\'" + Long.toString(duration) + "\'" : "") + "]");
  }

  private void testFailure(TestIdentifier testIdentifier,
                           String messageName,
                           Throwable ex,
                           long duration,
                           String reason,
                           boolean includeThrowable) {
    testFailure(testIdentifier.getDisplayName(), getId(testIdentifier), getParentId(testIdentifier), messageName, ex, duration, reason, includeThrowable);
  }

  // Emits one testFailed/testIgnored message; expands MultipleFailuresError into one
  // message per contained failure, and extracts expected/actual for comparison failures.
  private void testFailure(String methodName,
                           String id,
                           String parentId,
                           String messageName,
                           Throwable ex,
                           long duration,
                           String reason,
                           boolean includeThrowable) {
    final Map<String, String> attrs = new LinkedHashMap<>();
    attrs.put("name", methodName);
    attrs.put("id", id);
    attrs.put("nodeId", id);
    attrs.put("parentNodeId", parentId);
    if (duration > 0) {
      attrs.put("duration", Long.toString(duration));
    }
    if (reason != null) {
      attrs.put("message", reason);
    }
    try {
      if (ex != null) {
        ComparisonFailureData failureData = null;
        if (ex instanceof MultipleFailuresError && ((MultipleFailuresError)ex).hasFailures()) {
          // Recurse once per aggregated failure; the aggregate itself carries no data.
          for (Throwable assertionError : ((MultipleFailuresError)ex).getFailures()) {
            testFailure(methodName, id, parentId, messageName, assertionError, duration, reason, false);
          }
        }
        else if (ex instanceof AssertionFailedError && ((AssertionFailedError)ex).isActualDefined() && ((AssertionFailedError)ex).isExpectedDefined()) {
          final ValueWrapper actual = ((AssertionFailedError)ex).getActual();
          final ValueWrapper expected = ((AssertionFailedError)ex).getExpected();
          failureData = new ComparisonFailureData(expected.getStringRepresentation(), actual.getStringRepresentation());
        }
        else {
          //try to detect failure with junit 4 if present in the classpath
          try {
            failureData = ExpectedPatterns.createExceptionNotification(ex);
          }
          catch (Throwable ignore) {}
        }

        if (includeThrowable || failureData == null) {
          ComparisonFailureData.registerSMAttributes(failureData, getTrace(ex), ex.getMessage(), attrs, ex, "Comparison Failure: ", "expected: <");
        }
        else {
          ComparisonFailureData.registerSMAttributes(failureData, "", "", attrs, ex, "", "expected: <");
        }
      }
    }
    finally {
      // Always emit the message, even if attribute extraction above threw.
      myPrintStream.println("\n" + MapSerializerUtil.asString(messageName, attrs));
    }
  }

  protected String getTrace(Throwable ex) {
    final StringWriter stringWriter = new StringWriter();
    final PrintWriter writer = new PrintWriter(stringWriter);
    ex.printStackTrace(writer);
    return stringWriter.toString();
  }

  public void setTestPlan(TestPlan testPlan) {
    myTestPlan = testPlan;
  }

  // Emits the suite/test tree for the whole plan up front; when only one engine is
  // active, its root node is skipped so the tree starts at the user's classes.
  public void sendTree(TestPlan testPlan, String rootName) {
    myTestPlan = testPlan;
    myRootName = rootName;
    if (Boolean.parseBoolean(System.getProperty("idea.junit.show.engines", "true"))) {
      testPlan.getRoots().stream().filter(root1 -> !testPlan.getChildren(root1).isEmpty()).forEach(myActiveRoots::add);
    }
    if (myActiveRoots.size() > 1) {
      for (TestIdentifier root : myActiveRoots) {
        sendTreeUnderRoot(testPlan, root, new HashSet<>());
      }
    }
    else { //skip engine node when one engine available
      for (TestIdentifier root : testPlan.getRoots()) {
        assert root.isContainer();
        for (TestIdentifier testIdentifier : testPlan.getChildren(root)) {
          sendTreeUnderRoot(testPlan, testIdentifier, new HashSet<>());
        }
      }
    }
    myPrintStream.println("##teamcity[treeEnded]");
  }

  private String getId(TestIdentifier identifier) {
    return identifier.getUniqueId() + myIdSuffix;
  }

  // Depth-first tree emission; `visited` guards against reused identifiers (reported to stderr).
  private void sendTreeUnderRoot(TestPlan testPlan,
                                 TestIdentifier root,
                                 HashSet<TestIdentifier> visited) {
    final String idAndName = idAndName(root);
    if (root.isContainer()) {
      myPrintStream.println("##teamcity[suiteTreeStarted" + idAndName + " " + getLocationHint(root) + "]");
      for (TestIdentifier childIdentifier : testPlan.getChildren(root)) {
        if (visited.add(childIdentifier)) {
          sendTreeUnderRoot(testPlan, childIdentifier, visited);
        }
        else {
          System.err.println("Identifier \'" + getId(childIdentifier) + "\' is reused");
        }
      }
      myPrintStream.println("##teamcity[suiteTreeEnded" + idAndName + "]");
    }
    else if (root.isTest()) {
      myPrintStream.println("##teamcity[suiteTreeNode " + idAndName + " " + getLocationHint(root) + "]");
    }
  }

  private String idAndName(TestIdentifier testIdentifier) {
    return idAndName(testIdentifier, testIdentifier.getDisplayName());
  }

  private String idAndName(TestIdentifier testIdentifier, String displayName) {
    return " id=\'" + escapeName(getId(testIdentifier)) +
           "\' name=\'" + escapeName(displayName) +
           "\' nodeId=\'" + escapeName(getId(testIdentifier)) +
           "\' parentNodeId=\'" + escapeName(getParentId(testIdentifier)) + "\'";
  }

  // "0" denotes the synthetic root; engine roots collapse to "0" when only one engine runs.
  private String getParentId(TestIdentifier testIdentifier) {
    Optional<TestIdentifier> parent = myTestPlan.getParent(testIdentifier);
    if (myActiveRoots.size() <= 1 && !parent.map(identifier -> identifier.getParentId().orElse(null)).isPresent()) {
      return "0";
    }
    return parent
      .map(identifier -> identifier.getUniqueId() + myIdSuffix)
      .orElse("0");
  }

  static String getLocationHint(TestIdentifier root) {
    return root.getSource()
      .map(testSource -> getLocationHintValue(testSource))
      .filter(maybeLocationHintValue -> !NO_LOCATION_HINT_VALUE.equals(maybeLocationHintValue))
      .map(locationHintValue -> "locationHint=\'" + locationHintValue + "\'" + getMetainfo(root))
      .orElse(NO_LOCATION_HINT);
  }

  private static String getMetainfo(TestIdentifier root) {
    return root.getSource()
      .filter(testSource -> testSource instanceof MethodSource)
      .map(testSource -> " metainfo=\'" + ((MethodSource)testSource).getMethodParameterTypes() + "\'")
      .orElse(NO_LOCATION_HINT);
  }

  // Resolves a TestSource to a navigable location URL (file://... or java:test://...).
  static String getLocationHintValue(TestSource testSource) {

    if (testSource instanceof CompositeTestSource) {
      CompositeTestSource compositeTestSource = ((CompositeTestSource)testSource);

      //todo?
      // First resolvable nested source wins.
      for (TestSource sourceFromComposite : compositeTestSource.getSources()) {
        String locationHintValue = getLocationHintValue(sourceFromComposite);
        if (!NO_LOCATION_HINT_VALUE.equals(locationHintValue)) {
          return locationHintValue;
        }
      }
      return NO_LOCATION_HINT_VALUE;
    }

    if (testSource instanceof FileSource) {
      FileSource fileSource = (FileSource)testSource;
      File file = fileSource.getFile();
      String line = fileSource.getPosition()
        .map(position -> ":" + position.getLine())
        .orElse("");
      return "file://" + file.getAbsolutePath() + line;
    }

    if (testSource instanceof MethodSource) {
      MethodSource methodSource = (MethodSource)testSource;
      return javaLocation(methodSource.getClassName(), methodSource.getMethodName(), true);
    }

    if (testSource instanceof ClassSource) {
      String className = ((ClassSource)testSource).getClassName();
      return javaLocation(className, null, false);
    }

    return NO_LOCATION_HINT_VALUE;
  }

  private static String javaLocation(String className, String maybeMethodName, boolean isTest) {
    String type = isTest ? "test" : "suite";
    String methodName = maybeMethodName == null ? "" : "/" + maybeMethodName;
    String location = escapeName(className + methodName);
    return "java:" + type + "://" + location;
  }

  private static String escapeName(String str) {
    return MapSerializerUtil.escapeStr(str, MapSerializerUtil.STD_ESCAPER);
  }

  static String getClassName(TestIdentifier description) {
    return description.getSource().map(source -> {
      if (source instanceof MethodSource) {
        return ((MethodSource)source).getClassName();
      }
      if (source instanceof ClassSource) {
        return ((ClassSource)source).getClassName();
      }
      return null;
    }).orElse(null);
  }

  static String getMethodName(TestIdentifier testIdentifier) {
    return testIdentifier.getSource().map((source) -> {
      if (source instanceof MethodSource) {
        return ((MethodSource)source).getMethodName();
      }
      return null;
    }).orElse(null);
  }

  static String getMethodSignature(TestIdentifier testIdentifier) {
    return testIdentifier.getSource().map((source) -> {
      if (source instanceof MethodSource) {
        String parameterTypes = ((MethodSource)source).getMethodParameterTypes();
        return ((MethodSource)source).getMethodName() + (parameterTypes != null ? "(" + parameterTypes + ")" : "");
      }
      return null;
    }).orElse(null);
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jclouds.ssh.jsch; import static com.google.common.base.Objects.equal; import static com.google.common.base.Preconditions.checkNotNull; import org.jclouds.domain.Credentials; import org.jclouds.domain.LoginCredentials; import org.jclouds.javax.annotation.Nullable; import org.jclouds.proxy.ProxyConfig; import org.jclouds.ssh.jsch.JschSshClient.Connection; import com.google.common.base.Objects; import com.google.common.base.Optional; import com.google.common.net.HostAndPort; import com.jcraft.jsch.JSch; import com.jcraft.jsch.Proxy; import com.jcraft.jsch.ProxyHTTP; import com.jcraft.jsch.ProxySOCKS5; import com.jcraft.jsch.Session; import com.jcraft.jsch.agentproxy.Connector; import com.jcraft.jsch.agentproxy.RemoteIdentityRepository; public final class SessionConnection implements Connection<Session> { private Optional<Connector> agentConnector; public static Builder builder() { return new Builder(); } public static final class Builder { private HostAndPort hostAndPort; private LoginCredentials loginCredentials; private Optional<Proxy> proxy = Optional.absent(); private int connectTimeout; private int sessionTimeout; private Optional<Connector> agentConnector; /** * @see 
SessionConnection#getHostAndPort() */ public Builder hostAndPort(HostAndPort hostAndPort) { this.hostAndPort = hostAndPort; return this; } /** * @see SessionConnection#getLoginCredentials() */ public Builder loginCredentials(LoginCredentials loginCredentials) { this.loginCredentials = loginCredentials; return this; } /** * @see SessionConnection#getProxy() */ public Builder proxy(Proxy proxy) { this.proxy = Optional.fromNullable(proxy); return this; } /** * @see #proxy(Proxy) */ public Builder proxy(ProxyConfig proxyConfig) { Optional<HostAndPort> proxyEndpoint = proxyConfig.getProxy(); if (!proxyEndpoint.isPresent()) return proxy((Proxy) null); Optional<Credentials> creds = proxyConfig.getCredentials(); switch (proxyConfig.getType()) { case HTTP: ProxyHTTP httpProxy = new ProxyHTTP(proxyEndpoint.get().getHostText(), proxyEndpoint.get().getPort()); if (creds.isPresent()) httpProxy.setUserPasswd(creds.get().identity, creds.get().credential); return proxy(httpProxy); case SOCKS: ProxySOCKS5 socksProxy = new ProxySOCKS5(proxyEndpoint.get().getHostText(), proxyEndpoint.get().getPort()); if (creds.isPresent()) socksProxy.setUserPasswd(creds.get().identity, creds.get().credential); return proxy(socksProxy); default: throw new IllegalArgumentException(proxyConfig.getType() + " not supported"); } } /** * @see SessionConnection#getConnectTimeout() */ public Builder connectTimeout(int connectTimeout) { this.connectTimeout = connectTimeout; return this; } /** * @see SessionConnection#getConnectTimeout() */ public Builder sessionTimeout(int sessionTimeout) { this.sessionTimeout = sessionTimeout; return this; } public SessionConnection build() { return new SessionConnection(hostAndPort, loginCredentials, proxy, connectTimeout, sessionTimeout, agentConnector); } public Builder from(SessionConnection in) { return hostAndPort(in.hostAndPort).loginCredentials(in.loginCredentials).proxy(in.proxy.orNull()) 
.connectTimeout(in.connectTimeout).sessionTimeout(in.sessionTimeout).agentConnector(in.agentConnector); } public Builder agentConnector(Optional<Connector> agentConnector) { this.agentConnector = agentConnector; return this; } } private SessionConnection(HostAndPort hostAndPort, LoginCredentials loginCredentials, Optional<Proxy> proxy, int connectTimeout, int sessionTimeout, Optional<Connector> agentConnector) { this.hostAndPort = checkNotNull(hostAndPort, "hostAndPort"); this.loginCredentials = checkNotNull(loginCredentials, "loginCredentials for %", hostAndPort); this.connectTimeout = connectTimeout; this.sessionTimeout = sessionTimeout; this.proxy = checkNotNull(proxy, "proxy for %", hostAndPort); this.agentConnector = checkNotNull(agentConnector, "agentConnector for %", hostAndPort); } private static final byte[] emptyPassPhrase = new byte[0]; private final HostAndPort hostAndPort; private final LoginCredentials loginCredentials; private final Optional<Proxy> proxy; private final int connectTimeout; private final int sessionTimeout; private transient Session session; @Override public void clear() { if (session != null && session.isConnected()) { session.disconnect(); session = null; } } @Override public Session create() throws Exception { JSch jsch = new JSch(); session = jsch .getSession(loginCredentials.getUser(), hostAndPort.getHostText(), hostAndPort.getPortOrDefault(22)); if (sessionTimeout != 0) session.setTimeout(sessionTimeout); if (loginCredentials.hasUnencryptedPrivateKey()) { byte[] privateKey = loginCredentials.getOptionalPrivateKey().get().getBytes(); jsch.addIdentity(loginCredentials.getUser(), privateKey, null, emptyPassPhrase); } else if (loginCredentials.getOptionalPassword().isPresent()) { session.setPassword(loginCredentials.getOptionalPassword().orNull()); } else if (agentConnector.isPresent()) { JSch.setConfig("PreferredAuthentications", "publickey"); jsch.setIdentityRepository(new RemoteIdentityRepository(agentConnector.get())); } 
java.util.Properties config = new java.util.Properties(); config.put("StrictHostKeyChecking", "no"); session.setConfig(config); if (proxy.isPresent()) session.setProxy(proxy.get()); session.connect(connectTimeout); return session; } /** * @return host and port, where port if not present defaults to {@code 22} */ public HostAndPort getHostAndPort() { return hostAndPort; } /** * * @return login used in this session */ public LoginCredentials getLoginCredentials() { return loginCredentials; } /** * * @return proxy used for this connection */ public Optional<Proxy> getProxy() { return proxy; } /** * * @return how long to wait for the initial connection to be made */ public int getConnectTimeout() { return connectTimeout; } /** * * @return how long to keep the session open, or {@code 0} for indefinitely */ public int getSessionTimeout() { return sessionTimeout; } /** * * @return the current session or {@code null} if not connected */ @Nullable public Session getSession() { return session; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; SessionConnection that = SessionConnection.class.cast(o); return equal(this.hostAndPort, that.hostAndPort) && equal(this.loginCredentials, that.loginCredentials) && equal(this.session, that.session); } @Override public int hashCode() { return Objects.hashCode(hostAndPort, loginCredentials, session); } @Override public String toString() { return Objects.toStringHelper("").omitNullValues() .add("hostAndPort", hostAndPort).add("loginUser", loginCredentials.getUser()) .add("session", session != null ? session.hashCode() : null) .add("connectTimeout", connectTimeout) .add("proxy", proxy.orNull()) .add("sessionTimeout", sessionTimeout).toString(); } }
/******************************************************************************* * Copyright (c) 2015-2018 Skymind, Inc. * * This program and the accompanying materials are made available under the * terms of the Apache License, Version 2.0 which is available at * https://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * * SPDX-License-Identifier: Apache-2.0 ******************************************************************************/ package org.deeplearning4j.clustering.algorithm; import lombok.AccessLevel; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.math3.ml.clustering.KMeansPlusPlusClusterer; import org.deeplearning4j.clustering.cluster.Cluster; import org.deeplearning4j.clustering.cluster.ClusterSet; import org.deeplearning4j.clustering.cluster.ClusterUtils; import org.deeplearning4j.clustering.cluster.Point; import org.deeplearning4j.clustering.info.ClusterSetInfo; import org.deeplearning4j.clustering.iteration.IterationHistory; import org.deeplearning4j.clustering.iteration.IterationInfo; import org.deeplearning4j.clustering.strategy.ClusteringStrategy; import org.deeplearning4j.clustering.strategy.ClusteringStrategyType; import org.deeplearning4j.clustering.strategy.OptimisationStrategy; import org.deeplearning4j.clustering.util.MultiThreadUtils; import org.nd4j.base.Preconditions; import org.nd4j.linalg.api.ndarray.INDArray; import org.nd4j.linalg.factory.Nd4j; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import java.util.Random; import java.util.concurrent.ExecutorService; /** * * adapted to ndarray matrices 
* * @author Adam Gibson * @author Julien Roch * */ @Slf4j @NoArgsConstructor(access = AccessLevel.PROTECTED) public class BaseClusteringAlgorithm implements ClusteringAlgorithm, Serializable { private static final long serialVersionUID = 338231277453149972L; private ClusteringStrategy clusteringStrategy; private IterationHistory iterationHistory; private int currentIteration = 0; private ClusterSet clusterSet; private List<Point> initialPoints; private transient ExecutorService exec; private boolean useKmeansPlusPlus; protected BaseClusteringAlgorithm(ClusteringStrategy clusteringStrategy, boolean useKmeansPlusPlus) { this.clusteringStrategy = clusteringStrategy; this.exec = MultiThreadUtils.newExecutorService(); this.useKmeansPlusPlus = useKmeansPlusPlus; } /** * * @param clusteringStrategy * @return */ public static BaseClusteringAlgorithm setup(ClusteringStrategy clusteringStrategy, boolean useKmeansPlusPlus) { return new BaseClusteringAlgorithm(clusteringStrategy, useKmeansPlusPlus); } /** * * @param points * @return */ public ClusterSet applyTo(List<Point> points) { resetState(points); initClusters(useKmeansPlusPlus); iterations(); return clusterSet; } private void resetState(List<Point> points) { this.iterationHistory = new IterationHistory(); this.currentIteration = 0; this.clusterSet = null; this.initialPoints = points; } /** Run clustering iterations until a * termination condition is hit. 
* This is done by first classifying all points, * and then updating cluster centers based on * those classified points */ private void iterations() { int iterationCount = 0; while ((clusteringStrategy.getTerminationCondition() != null && !clusteringStrategy.getTerminationCondition().isSatisfied(iterationHistory)) || iterationHistory.getMostRecentIterationInfo().isStrategyApplied()) { currentIteration++; removePoints(); classifyPoints(); applyClusteringStrategy(); log.trace("Completed clustering iteration {}", ++iterationCount); } } protected void classifyPoints() { //Classify points. This also adds each point to the ClusterSet ClusterSetInfo clusterSetInfo = ClusterUtils.classifyPoints(clusterSet, initialPoints, exec); //Update the cluster centers, based on the points within each cluster ClusterUtils.refreshClustersCenters(clusterSet, clusterSetInfo, exec); iterationHistory.getIterationsInfos().put(currentIteration, new IterationInfo(currentIteration, clusterSetInfo)); } /** * Initialize the * cluster centers at random */ protected void initClusters(boolean kMeansPlusPlus) { log.info("Generating initial clusters"); List<Point> points = new ArrayList<>(initialPoints); //Initialize the ClusterSet with a single cluster center (based on position of one of the points chosen randomly) val random = Nd4j.getRandom(); Distance distanceFn = clusteringStrategy.getDistanceFunction(); int initialClusterCount = clusteringStrategy.getInitialClusterCount(); clusterSet = new ClusterSet(distanceFn, clusteringStrategy.inverseDistanceCalculation(), new long[]{initialClusterCount, points.get(0).getArray().length()}); clusterSet.addNewClusterWithCenter(points.remove(random.nextInt(points.size()))); //dxs: distances between // each point and nearest cluster to that point INDArray dxs = Nd4j.create(points.size()); dxs.addi(clusteringStrategy.inverseDistanceCalculation() ? 
-Double.MAX_VALUE : Double.MAX_VALUE); //Generate the initial cluster centers, by randomly selecting a point between 0 and max distance //Thus, we are more likely to select (as a new cluster center) a point that is far from an existing cluster while (clusterSet.getClusterCount() < initialClusterCount && !points.isEmpty()) { dxs = ClusterUtils.computeSquareDistancesFromNearestCluster(clusterSet, points, dxs, exec); double summed = Nd4j.sum(dxs).getDouble(0); double r = kMeansPlusPlus ? random.nextDouble() * summed: random.nextFloat() * dxs.maxNumber().doubleValue(); for (int i = 0; i < dxs.length(); i++) { double distance = dxs.getDouble(i); Preconditions.checkState(distance >= 0, "Encountered negative distance: distance function is not valid? Distance " + "function must return values >= 0, got distance %s for function s", distance, distanceFn); if (dxs.getDouble(i) >= r) { clusterSet.addNewClusterWithCenter(points.remove(i)); dxs = Nd4j.create(ArrayUtils.remove(dxs.data().asDouble(), i)); break; } } } ClusterSetInfo initialClusterSetInfo = ClusterUtils.computeClusterSetInfo(clusterSet); iterationHistory.getIterationsInfos().put(currentIteration, new IterationInfo(currentIteration, initialClusterSetInfo)); } protected void applyClusteringStrategy() { if (!isStrategyApplicableNow()) return; ClusterSetInfo clusterSetInfo = iterationHistory.getMostRecentClusterSetInfo(); if (!clusteringStrategy.isAllowEmptyClusters()) { int removedCount = removeEmptyClusters(clusterSetInfo); if (removedCount > 0) { iterationHistory.getMostRecentIterationInfo().setStrategyApplied(true); if (clusteringStrategy.isStrategyOfType(ClusteringStrategyType.FIXED_CLUSTER_COUNT) && clusterSet.getClusterCount() < clusteringStrategy.getInitialClusterCount()) { int splitCount = ClusterUtils.splitMostSpreadOutClusters(clusterSet, clusterSetInfo, clusteringStrategy.getInitialClusterCount() - clusterSet.getClusterCount(), exec); if (splitCount > 0) 
iterationHistory.getMostRecentIterationInfo().setStrategyApplied(true); } } } if (clusteringStrategy.isStrategyOfType(ClusteringStrategyType.OPTIMIZATION)) optimize(); } protected void optimize() { ClusterSetInfo clusterSetInfo = iterationHistory.getMostRecentClusterSetInfo(); OptimisationStrategy optimization = (OptimisationStrategy) clusteringStrategy; boolean applied = ClusterUtils.applyOptimization(optimization, clusterSet, clusterSetInfo, exec); iterationHistory.getMostRecentIterationInfo().setStrategyApplied(applied); } private boolean isStrategyApplicableNow() { return clusteringStrategy.isOptimizationDefined() && iterationHistory.getIterationCount() != 0 && clusteringStrategy.isOptimizationApplicableNow(iterationHistory); } protected int removeEmptyClusters(ClusterSetInfo clusterSetInfo) { List<Cluster> removedClusters = clusterSet.removeEmptyClusters(); clusterSetInfo.removeClusterInfos(removedClusters); return removedClusters.size(); } protected void removePoints() { clusterSet.removePoints(); } }
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.regionserver; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.master.RegionState; import org.apache.hadoop.hbase.shaded.com.google.protobuf.UnsafeByteOperations; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSTableDescriptors; import 
org.apache.hadoop.hbase.util.MD5Hash; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TestName; @Category({RegionServerTests.class, SmallTests.class}) public class TestHRegionInfo { @Rule public TestName name = new TestName(); @Test public void testPb() throws DeserializationException { HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO; byte [] bytes = hri.toByteArray(); HRegionInfo pbhri = HRegionInfo.parseFrom(bytes); assertTrue(hri.equals(pbhri)); } @Test public void testReadAndWriteHRegionInfoFile() throws IOException, InterruptedException { HBaseTestingUtility htu = new HBaseTestingUtility(); HRegionInfo hri = HRegionInfo.FIRST_META_REGIONINFO; Path basedir = htu.getDataTestDir(); // Create a region. That'll write the .regioninfo file. FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(htu.getConfiguration()); HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, basedir, htu.getConfiguration(), fsTableDescriptors.get(TableName.META_TABLE_NAME)); // Get modtime on the file. long modtime = getModTime(r); HBaseTestingUtility.closeRegionAndWAL(r); Thread.sleep(1001); r = HRegion.openHRegion(basedir, hri, fsTableDescriptors.get(TableName.META_TABLE_NAME), null, htu.getConfiguration()); // Ensure the file is not written for a second time. long modtime2 = getModTime(r); assertEquals(modtime, modtime2); // Now load the file. 
HRegionInfo deserializedHri = HRegionFileSystem.loadRegionInfoFileContent( r.getRegionFileSystem().getFileSystem(), r.getRegionFileSystem().getRegionDir()); assertTrue(hri.equals(deserializedHri)); HBaseTestingUtility.closeRegionAndWAL(r); } long getModTime(final HRegion r) throws IOException { FileStatus[] statuses = r.getRegionFileSystem().getFileSystem().listStatus( new Path(r.getRegionFileSystem().getRegionDir(), HRegionFileSystem.REGION_INFO_FILE)); assertTrue(statuses != null && statuses.length == 1); return statuses[0].getModificationTime(); } @Test public void testCreateHRegionInfoName() throws Exception { final String tableName = name.getMethodName(); final TableName tn = TableName.valueOf(tableName); String startKey = "startkey"; final byte[] sk = Bytes.toBytes(startKey); String id = "id"; // old format region name byte [] name = HRegionInfo.createRegionName(tn, sk, id, false); String nameStr = Bytes.toString(name); assertEquals(tableName + "," + startKey + "," + id, nameStr); // new format region name. String md5HashInHex = MD5Hash.getMD5AsHex(name); assertEquals(HRegionInfo.MD5_HEX_LENGTH, md5HashInHex.length()); name = HRegionInfo.createRegionName(tn, sk, id, true); nameStr = Bytes.toString(name); assertEquals(tableName + "," + startKey + "," + id + "." 
+ md5HashInHex + ".", nameStr); } @Test public void testContainsRange() { HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(name.getMethodName())); HRegionInfo hri = new HRegionInfo( tableDesc.getTableName(), Bytes.toBytes("a"), Bytes.toBytes("g")); // Single row range at start of region assertTrue(hri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("a"))); // Fully contained range assertTrue(hri.containsRange(Bytes.toBytes("b"), Bytes.toBytes("c"))); // Range overlapping start of region assertTrue(hri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("c"))); // Fully contained single-row range assertTrue(hri.containsRange(Bytes.toBytes("c"), Bytes.toBytes("c"))); // Range that overlaps end key and hence doesn't fit assertFalse(hri.containsRange(Bytes.toBytes("a"), Bytes.toBytes("g"))); // Single row range on end key assertFalse(hri.containsRange(Bytes.toBytes("g"), Bytes.toBytes("g"))); // Single row range entirely outside assertFalse(hri.containsRange(Bytes.toBytes("z"), Bytes.toBytes("z"))); // Degenerate range try { hri.containsRange(Bytes.toBytes("z"), Bytes.toBytes("a")); fail("Invalid range did not throw IAE"); } catch (IllegalArgumentException iae) { } } @Test public void testLastRegionCompare() { HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(name.getMethodName())); HRegionInfo hrip = new HRegionInfo( tableDesc.getTableName(), Bytes.toBytes("a"), new byte[0]); HRegionInfo hric = new HRegionInfo( tableDesc.getTableName(), Bytes.toBytes("a"), Bytes.toBytes("b")); assertTrue(hrip.compareTo(hric) > 0); } @Test public void testMetaTables() { assertTrue(HRegionInfo.FIRST_META_REGIONINFO.isMetaTable()); } @Test public void testComparator() { final TableName tableName = TableName.valueOf(name.getMethodName()); byte[] empty = new byte[0]; HRegionInfo older = new HRegionInfo(tableName, empty, empty, false, 0L); HRegionInfo newer = new HRegionInfo(tableName, empty, empty, false, 1L); assertTrue(older.compareTo(newer) < 0); 
assertTrue(newer.compareTo(older) > 0); assertTrue(older.compareTo(older) == 0); assertTrue(newer.compareTo(newer) == 0); } @Test public void testRegionNameForRegionReplicas() throws Exception { String tableName = name.getMethodName(); final TableName tn = TableName.valueOf(tableName); String startKey = "startkey"; final byte[] sk = Bytes.toBytes(startKey); String id = "id"; // assert with only the region name without encoding // primary, replicaId = 0 byte [] name = HRegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0, false); String nameStr = Bytes.toString(name); assertEquals(tableName + "," + startKey + "," + id, nameStr); // replicaId = 1 name = HRegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 1, false); nameStr = Bytes.toString(name); assertEquals(tableName + "," + startKey + "," + id + "_" + String.format(HRegionInfo.REPLICA_ID_FORMAT, 1), nameStr); // replicaId = max name = HRegionInfo.createRegionName(tn, sk, Bytes.toBytes(id), 0xFFFF, false); nameStr = Bytes.toString(name); assertEquals(tableName + "," + startKey + "," + id + "_" + String.format(HRegionInfo.REPLICA_ID_FORMAT, 0xFFFF), nameStr); } @Test public void testParseName() throws IOException { final TableName tableName = TableName.valueOf(name.getMethodName()); byte[] startKey = Bytes.toBytes("startKey"); long regionId = System.currentTimeMillis(); int replicaId = 42; // test without replicaId byte[] regionName = HRegionInfo.createRegionName(tableName, startKey, regionId, false); byte[][] fields = HRegionInfo.parseRegionName(regionName); assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]); assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]); assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]); assertEquals(3, fields.length); // test with replicaId regionName = HRegionInfo.createRegionName(tableName, startKey, regionId, replicaId, false); fields = HRegionInfo.parseRegionName(regionName); 
assertArrayEquals(Bytes.toString(fields[0]),tableName.getName(), fields[0]); assertArrayEquals(Bytes.toString(fields[1]),startKey, fields[1]); assertArrayEquals(Bytes.toString(fields[2]), Bytes.toBytes(Long.toString(regionId)),fields[2]); assertArrayEquals(Bytes.toString(fields[3]), Bytes.toBytes( String.format(HRegionInfo.REPLICA_ID_FORMAT, replicaId)), fields[3]); } @Test public void testConvert() { final TableName tableName = TableName.valueOf("ns1:" + name.getMethodName()); byte[] startKey = Bytes.toBytes("startKey"); byte[] endKey = Bytes.toBytes("endKey"); boolean split = false; long regionId = System.currentTimeMillis(); int replicaId = 42; HRegionInfo hri = new HRegionInfo(tableName, startKey, endKey, split, regionId, replicaId); // convert two times, compare HRegionInfo convertedHri = HRegionInfo.convert(HRegionInfo.convert(hri)); assertEquals(hri, convertedHri); // test convert RegionInfo without replicaId RegionInfo info = RegionInfo.newBuilder() .setTableName(HBaseProtos.TableName.newBuilder() .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier())) .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace())) .build()) .setStartKey(UnsafeByteOperations.unsafeWrap(startKey)) .setEndKey(UnsafeByteOperations.unsafeWrap(endKey)) .setSplit(split) .setRegionId(regionId) .build(); convertedHri = HRegionInfo.convert(info); HRegionInfo expectedHri = new HRegionInfo(tableName, startKey, endKey, split, regionId, 0); // expecting default replicaId assertEquals(expectedHri, convertedHri); } @Test public void testRegionDetailsForDisplay() throws IOException { byte[] startKey = new byte[] {0x01, 0x01, 0x02, 0x03}; byte[] endKey = new byte[] {0x01, 0x01, 0x02, 0x04}; Configuration conf = new Configuration(); conf.setBoolean("hbase.display.keys", false); HRegionInfo h = new HRegionInfo(TableName.valueOf(name.getMethodName()), startKey, endKey); checkEquality(h, conf); // check HRIs with non-default replicaId h = new 
HRegionInfo(TableName.valueOf(name.getMethodName()), startKey, endKey, false, System.currentTimeMillis(), 1); checkEquality(h, conf); Assert.assertArrayEquals(HRegionInfo.HIDDEN_END_KEY, HRegionInfo.getEndKeyForDisplay(h, conf)); Assert.assertArrayEquals(HRegionInfo.HIDDEN_START_KEY, HRegionInfo.getStartKeyForDisplay(h, conf)); RegionState state = new RegionState(h, RegionState.State.OPEN); String descriptiveNameForDisplay = HRegionInfo.getDescriptiveNameFromRegionStateForDisplay(state, conf); checkDescriptiveNameEquality(descriptiveNameForDisplay,state.toDescriptiveString(), startKey); conf.setBoolean("hbase.display.keys", true); Assert.assertArrayEquals(endKey, HRegionInfo.getEndKeyForDisplay(h, conf)); Assert.assertArrayEquals(startKey, HRegionInfo.getStartKeyForDisplay(h, conf)); Assert.assertEquals(state.toDescriptiveString(), HRegionInfo.getDescriptiveNameFromRegionStateForDisplay(state, conf)); } private void checkDescriptiveNameEquality(String descriptiveNameForDisplay, String origDesc, byte[] startKey) { // except for the "hidden-start-key" substring everything else should exactly match String firstPart = descriptiveNameForDisplay.substring(0, descriptiveNameForDisplay.indexOf(new String(HRegionInfo.HIDDEN_START_KEY))); String secondPart = descriptiveNameForDisplay.substring( descriptiveNameForDisplay.indexOf(new String(HRegionInfo.HIDDEN_START_KEY)) + HRegionInfo.HIDDEN_START_KEY.length); String firstPartOrig = origDesc.substring(0, origDesc.indexOf(Bytes.toStringBinary(startKey))); String secondPartOrig = origDesc.substring( origDesc.indexOf(Bytes.toStringBinary(startKey)) + Bytes.toStringBinary(startKey).length()); assert(firstPart.equals(firstPartOrig)); assert(secondPart.equals(secondPartOrig)); } private void checkEquality(HRegionInfo h, Configuration conf) throws IOException { byte[] modifiedRegionName = HRegionInfo.getRegionNameForDisplay(h, conf); byte[][] modifiedRegionNameParts = HRegionInfo.parseRegionName(modifiedRegionName); byte[][] 
regionNameParts = HRegionInfo.parseRegionName(h.getRegionName()); //same number of parts assert(modifiedRegionNameParts.length == regionNameParts.length); for (int i = 0; i < regionNameParts.length; i++) { // all parts should match except for [1] where in the modified one, // we should have "hidden_start_key" if (i != 1) { Assert.assertArrayEquals(regionNameParts[i], modifiedRegionNameParts[i]); } else { Assert.assertNotEquals(regionNameParts[i][0], modifiedRegionNameParts[i][0]); Assert.assertArrayEquals(modifiedRegionNameParts[1], HRegionInfo.getStartKeyForDisplay(h, conf)); } } } }
/* Copyright 2013, Lorand Bendig Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.ambrose.hive; import java.lang.reflect.Field; import java.util.Map; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.mapred.Counters; import org.apache.hadoop.mapred.Counters.Counter; import org.apache.hadoop.mapred.RunningJob; import com.twitter.ambrose.model.hadoop.CounterGroup; /** * Utility for Ambrose-Hive related operations * * @author Lorand Bendig <lbendig@gmail.com> * */ public class AmbroseHiveUtil { private static final Pattern STAGEID_PATTERN = Pattern.compile("^.*\\((Stage\\-\\d+)\\)$", Pattern.DOTALL); private AmbroseHiveUtil() { throw new AssertionError("shouldn't be instantiated!"); } /** * Constructs the jobTracker url based on the jobId. 
* * @param jobID * @param conf * @return * @see org.apache.hadoop.hive.hwi#getJobTrackerURL(String) */ public static String getJobTrackerURL(String jobID, HiveConf conf) { String jt = conf.get("mapred.job.tracker"); String jth = conf.get("mapred.job.tracker.http.address"); String[] jtparts = null; String[] jthttpParts = null; if (jt.equalsIgnoreCase("local")) { jtparts = new String[2]; jtparts[0] = "local"; jtparts[1] = ""; } else { jtparts = jt.split(":"); } if (jth.contains(":")) { jthttpParts = jth.split(":"); } else { jthttpParts = new String[2]; jthttpParts[0] = jth; jthttpParts[1] = ""; } return jtparts[0] + ":" + jthttpParts[1] + "/jobdetails.jsp?jobid=" + jobID + "&refresh=30"; } /** * Constructs Countergroups from job runtime statistics * * @param counterNameToValue * @return */ public static Map<String, CounterGroup> counterGroupInfoMap(Map<String, Double> counterNameToValue) { Counters counters = new Counters(); for (Map.Entry<String, ? extends Number> entry : counterNameToValue.entrySet()) { String[] cNames = entry.getKey().split("::"); String groupName = cNames[0]; String counterName = cNames[1]; Counter counter = counters.findCounter(groupName, counterName); //counter.setValue(entry.getValue().longValue()); } return CounterGroup.counterGroupInfoMap(counters); } public static String asDisplayId(String queryId, String jobIDStr, String nodeId) { String stageName = nodeId.substring(0, nodeId.indexOf('_')); String wfIdLastPart = queryId.substring(queryId.lastIndexOf('-') + 1, queryId.length()); String displayJobId = String.format(jobIDStr + " (%s, query-id: ...%s)", stageName, wfIdLastPart); return displayJobId; } public static String getNodeIdFromNodeName(Configuration conf, String nodeName) { return nodeName + "_" + getHiveQueryId(conf); } /** * Returns the nodeId of the given running job <br> * E.g: Stage-1_[queryId] * * @param conf * @param runningJob * @return */ public static String getNodeIdFromJob(Configuration conf, RunningJob runningJob) { return 
getNodeIdFromJobName(conf, runningJob.getJobName()); } /** * Retrieves the nodeId from the Hive SQL command <br> * * @param conf * @param jobName * @return */ private static String getNodeIdFromJobName(Configuration conf, String jobName) { Matcher matcher = STAGEID_PATTERN.matcher(jobName); if (matcher.find()) { return getNodeIdFromNodeName(conf, matcher.group(1)); } return null; } /** * Returns the Hive query id which identifies the current workflow <br> * Format: hive_[queryId] * * @param conf * @return */ public static String getHiveQueryId(Configuration conf) { return HiveConf.getVar(conf, ConfVars.HIVEQUERYID); } /** * Gets the temporary directory of the given job * * @param conf * @return */ public static String getJobTmpDir(Configuration conf) { String fsNameVar = HiveConf.getVar(conf, ConfVars.HADOOPFS); String fsName = fsNameVar.substring(0, fsNameVar.length() - 1); return fsName + HiveConf.getVar(conf, ConfVars.SCRATCHDIR); } /** * Gets the temporary local directory of the given job * * @param conf * @return */ public static String getJobTmpLocalDir(Configuration conf) { String fsNameVar = HiveConf.getVar(conf, ConfVars.HADOOPFS); String fsName = fsNameVar.substring(0, fsNameVar.length() - 1); /* no LOCALSCRATCHDIR in hive 1.1.4, so change it. * return fsName + HiveConf.getVar(conf, ConfVars.LOCALSCRATCHDIR, ""); * */ return fsName + "/tmp/" + System.getProperty("user.name"); } /** * Gets (non-accessible) field of a class * * @param clazz * @param fieldName * @return * @throws Exception */ public static Field getInternalField(Class<?> clazz, String fieldName) throws Exception { Field field = clazz.getDeclaredField(fieldName); field.setAccessible(true); return field; } /** * Compares two float values * * @param f1 * @param f2 * @return true if f1 and f2 are equal */ public static boolean isEqual(float f1, float f2) { final float delta = 0.001f; return (Math.abs(f1 - f2) < delta) ? true : false; } }
// Copyright (C) 2012 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.httpd.plugins; import com.google.common.collect.Maps; import com.google.gerrit.common.Version; import com.google.gerrit.server.plugins.Plugin; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.InputStream; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.net.URL; import java.util.Collections; import java.util.Enumeration; import java.util.Set; import java.util.concurrent.ConcurrentMap; import javax.servlet.RequestDispatcher; import javax.servlet.Servlet; import javax.servlet.ServletContext; class PluginServletContext { private static final Logger log = LoggerFactory.getLogger("plugin"); static ServletContext create(Plugin plugin, String contextPath) { return (ServletContext) Proxy.newProxyInstance( PluginServletContext.class.getClassLoader(), new Class[] {ServletContext.class, API.class}, new Handler(plugin, contextPath)); } private PluginServletContext() { } private static class Handler implements InvocationHandler, API { private final Plugin plugin; private final String contextPath; private final ConcurrentMap<String, Object> attributes; Handler(Plugin plugin, String contextPath) { this.plugin = plugin; this.contextPath = contextPath; this.attributes = Maps.newConcurrentMap(); } @Override public Object invoke(Object proxy, Method method, Object[] 
args) throws Throwable { Method handler; try { handler = API.class.getDeclaredMethod( method.getName(), method.getParameterTypes()); } catch (NoSuchMethodException e) { throw new NoSuchMethodError(String.format( "%s does not implement%s", PluginServletContext.class, method.toGenericString())); } return handler.invoke(this, args); } @Override public String getContextPath() { return contextPath; } @Override public String getInitParameter(String name) { return null; } @SuppressWarnings("rawtypes") @Override public Enumeration getInitParameterNames() { return Collections.enumeration(Collections.emptyList()); } @Override public ServletContext getContext(String name) { return null; } @Override public RequestDispatcher getNamedDispatcher(String name) { return null; } @Override public RequestDispatcher getRequestDispatcher(String name) { return null; } @Override public URL getResource(String name) { return null; } @Override public InputStream getResourceAsStream(String name) { return null; } @SuppressWarnings("rawtypes") @Override public Set getResourcePaths(String name) { return null; } @Override public Servlet getServlet(String name) { return null; } @Override public String getRealPath(String name) { return null; } @Override public String getServletContextName() { return plugin.getName(); } @SuppressWarnings("rawtypes") @Override public Enumeration getServletNames() { return Collections.enumeration(Collections.emptyList()); } @SuppressWarnings("rawtypes") @Override public Enumeration getServlets() { return Collections.enumeration(Collections.emptyList()); } @Override public void log(Exception reason, String msg) { log(msg, reason); } @Override public void log(String msg) { log(msg, null); } @Override public void log(String msg, Throwable reason) { log.warn(String.format("[plugin %s] %s", plugin.getName(), msg), reason); } @Override public Object getAttribute(String name) { return attributes.get(name); } @Override public Enumeration<String> getAttributeNames() { return 
Collections.enumeration(attributes.keySet()); } @Override public void setAttribute(String name, Object value) { attributes.put(name, value); } @Override public void removeAttribute(String name) { attributes.remove(name); } @Override public String getMimeType(String file) { return null; } @Override public int getMajorVersion() { return 2; } @Override public int getMinorVersion() { return 5; } @Override public String getServerInfo() { String v = Version.getVersion(); return "Gerrit Code Review/" + (v != null ? v : "dev"); } } static interface API { String getContextPath(); String getInitParameter(String name); @SuppressWarnings("rawtypes") Enumeration getInitParameterNames(); ServletContext getContext(String name); RequestDispatcher getNamedDispatcher(String name); RequestDispatcher getRequestDispatcher(String name); URL getResource(String name); InputStream getResourceAsStream(String name); @SuppressWarnings("rawtypes") Set getResourcePaths(String name); Servlet getServlet(String name); String getRealPath(String name); String getServletContextName(); @SuppressWarnings("rawtypes") Enumeration getServletNames(); @SuppressWarnings("rawtypes") Enumeration getServlets(); void log(Exception reason, String msg); void log(String msg); void log(String msg, Throwable reason); Object getAttribute(String name); Enumeration<String> getAttributeNames(); void setAttribute(String name, Object value); void removeAttribute(String name); String getMimeType(String file); int getMajorVersion(); int getMinorVersion(); String getServerInfo(); } }
package aserg.gtf;

import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;

import org.apache.log4j.Logger;

import aserg.gtf.commands.SystemCommandExecutor;
import aserg.gtf.dao.ProjectInfoDAO;
import aserg.gtf.model.DeveloperInfo;
import aserg.gtf.model.LogCommitInfo;
import aserg.gtf.model.NewFileInfo;
import aserg.gtf.model.ProjectInfo;
import aserg.gtf.model.ProjectStatus;
import aserg.gtf.model.authorship.Repository;
import aserg.gtf.model.newstudy.Measure;
import aserg.gtf.task.DOACalculator;
import aserg.gtf.task.SimpleAliasHandler;
import aserg.gtf.task.extractor.FileInfoExtractor;
import aserg.gtf.task.extractor.GitLogExtractor;
import aserg.gtf.task.extractor.LinguistExtractor;
import aserg.gtf.truckfactor.PrunedGreedyTruckFactor;
import aserg.gtf.truckfactor.TFInfo;
import aserg.gtf.truckfactor.TruckFactor;
import aserg.gtf.util.LineInfo;

/**
 * Shared helper operations for Truck Factor (TF) computations over a git
 * repository: filtering commit sets by date, grouping commits per developer,
 * running the DOA (Degree of Authorship) calculation, and invoking external
 * helper scripts that check out repository state at a specific commit.
 */
public class CommonMethods {
	// Fixed: was Logger.getLogger(GitTruckFactor.class), which made log output
	// from this class appear under the wrong category.
	private static final Logger LOGGER = Logger.getLogger(CommonMethods.class);

	/** Extracts the list of files currently in the repository working tree. */
	protected FileInfoExtractor fileExtractor;
	/** Flags files that Linguist does not classify as source code. */
	protected LinguistExtractor linguistExtractor;
	/** Parses the git log of the repository (public: used directly by callers). */
	public GitLogExtractor gitLogExtractor;

	private String repositoryPath;
	private String repositoryName;
	// NOTE(review): never assigned anywhere in this class, so the alias
	// replacement branch in getTF() is effectively disabled — confirm whether
	// loading from repo_info/alias.txt was meant to populate it.
	private static Map<String, List<LineInfo>> aliasInfo;

	/**
	 * @param repositoryPath local path of the git repository; it is directly
	 *                       concatenated with file names, so it is expected to
	 *                       end with a path separator — TODO confirm with callers
	 * @param repositoryName logical name of the repository
	 */
	public CommonMethods(String repositoryPath, String repositoryName) {
		this.repositoryName = repositoryName;
		this.repositoryPath = repositoryPath;
		fileExtractor = new FileInfoExtractor(repositoryPath, repositoryName);
		linguistExtractor = new LinguistExtractor(repositoryPath, repositoryName);
		gitLogExtractor = new GitLogExtractor(repositoryPath, repositoryName);
	}

	/**
	 * Rewrites the repository's commitinfo.log in place, replacing every
	 * aliased author name (values[1] of each line) with its canonical name
	 * (values[0]). Names are matched between the ";-" and "-;" delimiters
	 * used by the log format.
	 */
	public void replaceNamesInLogCommitFile(List<LineInfo> repoAliasInfo) {
		Path path = Paths.get(repositoryPath + "commitinfo.log");
		Charset charset = StandardCharsets.UTF_8;
		try {
			String content = new String(Files.readAllBytes(path), charset);
			for (LineInfo lineInfo : repoAliasInfo) {
				content = content.replace(";-" + lineInfo.getValues().get(1) + "-;",
						";-" + lineInfo.getValues().get(0) + "-;");
			}
			Files.write(path, content.getBytes(charset));
		} catch (IOException e) {
			// Fixed: was e.printStackTrace() — route the failure through the
			// logger so it shows up in normal log output.
			LOGGER.error("Failed to rewrite " + path + " with alias info", e);
		}
	}

	/**
	 * Fills the event-related fields of {@code measure} (number of commits,
	 * developers, files and source files) with the repository state at the
	 * commit nearest to the measure's last-TF-leaver date.
	 *
	 * @param measure           measure object to update (mutated in place)
	 * @param scriptsPath       directory containing getInfoAtSpecifcCommit.sh
	 * @param allRepoCommits    all commits of the repository, keyed by SHA
	 * @param sortedCommitList  the same commits sorted ascending by date
	 */
	public void insertAdditionalInfo(Measure measure, String scriptsPath,
			Map<String, LogCommitInfo> allRepoCommits,
			List<LogCommitInfo> sortedCommitList) throws IOException, Exception {
		String commitSha = getNearCommit(measure.getLastTFLeaverDate(), sortedCommitList).getSha();
		// Restrict the commit set to everything up to (and including) that commit.
		Map<String, LogCommitInfo> partialRepoCommits = removeCommitsAfter(allRepoCommits, commitSha);
		Map<String, Integer> mapIds = new SimpleAliasHandler().execute(repositoryName, partialRepoCommits);
		Map<Integer, DeveloperInfo> repositoryDevelopers = getRepositoryDevelopers(partialRepoCommits, mapIds);
		// Extract file info at the chosen commit via the helper script.
		createAndExecuteCommand(scriptsPath + "getInfoAtSpecifcCommit.sh " + repositoryPath + " " + commitSha);
		// GET Repository files
		List<NewFileInfo> files = fileExtractor.execute();
		files = linguistExtractor.setNotLinguist(files);
		measure.setEventNCommits(partialRepoCommits.size());
		measure.setEventNDevs(getNAuthors(repositoryDevelopers));
		measure.setEventNAllFiles(files.size());
		// Fixed: reuse getNumFiles() instead of duplicating the not-filtered count.
		measure.setEventNSourceFiles(getNumFiles(files));
	}

	/**
	 * Returns a copy of {@code commits} containing only commits whose main
	 * commit date is not after the date of the commit identified by {@code sha}.
	 *
	 * @throws NullPointerException if {@code sha} is not present in the map
	 */
	public Map<String, LogCommitInfo> removeCommitsAfter(Map<String, LogCommitInfo> commits, String sha) {
		Date endDate = commits.get(sha).getMainCommitDate();
		return filterCommitsByDate(commits, endDate);
	}

	/**
	 * Computes the Truck Factor of the repository as of {@code calcDate},
	 * using only commits up to that date and the file tree at
	 * {@code nearCommit}.
	 */
	public TFInfo getTF(Date calcDate, Map<String, LogCommitInfo> allRepoCommits,
			LogCommitInfo nearCommit) throws IOException, Exception {
		Map<String, LogCommitInfo> partialRepoCommits = filterCommitsByDate(allRepoCommits, calcDate);
		// Extract file info at the near commit. NOTE(review): the script path is
		// relative here ("./"), unlike insertAdditionalInfo which receives an
		// explicit scriptsPath — confirm this asymmetry is intended.
		String stdOut = createAndExecuteCommand("./getInfoAtSpecifcCommit.sh " + repositoryPath + " " + nearCommit.getSha());
		System.out.println(stdOut);
		if (aliasInfo != null && aliasInfo.containsKey(repositoryName)) {
			this.replaceNamesInLogCommitFile(aliasInfo.get(repositoryName));
		}
		// GET Repository files
		List<NewFileInfo> files = fileExtractor.execute();
		files = linguistExtractor.setNotLinguist(files);
		// GET Repository DOA results
		DOACalculator doaCalculator = new DOACalculator(repositoryPath, repositoryName,
				partialRepoCommits.values(), files);
		Repository repository = doaCalculator.execute();
		// GET Repository TF
		TruckFactor truckFactor = new PrunedGreedyTruckFactor(0.1f);
		return truckFactor.getTruckFactor(repository);
	}

	/**
	 * Returns the latest commit in {@code sortedCommitList} (assumed ascending
	 * by date) whose date is on or before {@code calcDate}. If every commit is
	 * later than {@code calcDate}, the first commit is returned.
	 */
	public LogCommitInfo getNearCommit(Date calcDate, List<LogCommitInfo> sortedCommitList) {
		LogCommitInfo retCommit = sortedCommitList.get(0);
		for (LogCommitInfo logCommitInfo : sortedCommitList) {
			// !after == (before || equals): same predicate as the original.
			if (!logCommitInfo.getMainCommitDate().after(calcDate)) {
				retCommit = logCommitInfo;
			} else {
				return retCommit;
			}
		}
		return retCommit;
	}

	/**
	 * Recomputes summary data (commit span, author count, file count, TF) for
	 * {@code projectInfo}, marks it TF_COMPUTED, and persists it.
	 */
	public void updateRepo(ProjectInfoDAO projectDAO, ProjectInfo projectInfo,
			Map<String, LogCommitInfo> allRepoCommits,
			Map<Integer, DeveloperInfo> repositoryDevelopers,
			LogCommitInfo firstCommit, LogCommitInfo lastCommit) throws Exception, IOException {
		projectInfo.setFirstCommit(firstCommit.getMainCommitDate());
		projectInfo.setLastCommit(lastCommit.getMainCommitDate());
		projectInfo.setNumAuthors(getNAuthors(repositoryDevelopers));
		// GET Repository files
		List<NewFileInfo> files = fileExtractor.execute();
		files = linguistExtractor.setNotLinguist(files);
		projectInfo.setNumFiles(getNumFiles(files));
		// GET Repository DOA results
		DOACalculator doaCalculator = new DOACalculator(repositoryPath, repositoryName,
				allRepoCommits.values(), files);
		Repository repository = doaCalculator.execute();
		// GET Repository TF
		TruckFactor truckFactor = new PrunedGreedyTruckFactor(0.1f);
		TFInfo tf = truckFactor.getTruckFactor(repository);
		projectInfo.setTf(tf.getTf());
		System.out.println(tf);
		projectInfo.setStatus(ProjectStatus.TF_COMPUTED);
		projectDAO.update(projectInfo);
	}

	/** Counts files that were not filtered out (i.e. real source files). */
	private int getNumFiles(List<NewFileInfo> files) {
		int n = 0;
		for (NewFileInfo newFileInfo : files) {
			if (!newFileInfo.getFiltered()) {
				n++;
			}
		}
		return n;
	}

	/**
	 * Whole days between two dates (truncates any partial day; negative when
	 * {@code d2} is before {@code d1}).
	 */
	public static int daysBetween(Date d1, Date d2) {
		return (int) ((d2.getTime() - d1.getTime()) / (1000 * 60 * 60 * 24));
	}

	/**
	 * Returns a copy of {@code commits} without the commits whose main commit
	 * date is strictly after {@code endDate}. The input map is not modified.
	 */
	public Map<String, LogCommitInfo> filterCommitsByDate(Map<String, LogCommitInfo> commits, Date endDate) {
		Map<String, LogCommitInfo> newCommits = new HashMap<String, LogCommitInfo>(commits);
		for (Entry<String, LogCommitInfo> entry : commits.entrySet()) {
			if (entry.getValue().getMainCommitDate().after(endDate)) {
				newCommits.remove(entry.getKey());
			}
		}
		return newCommits;
	}

	/** Returns the commits as a new list sorted ascending by main commit date. */
	public List<LogCommitInfo> getSortedCommitList(Map<String, LogCommitInfo> commits) {
		List<LogCommitInfo> newListOfCommits = new ArrayList<LogCommitInfo>(commits.values());
		Collections.sort(newListOfCommits, new Comparator<LogCommitInfo>() {
			@Override
			public int compare(LogCommitInfo lhs, LogCommitInfo rhs) {
				return lhs.getMainCommitDate().compareTo(rhs.getMainCommitDate());
			}
		});
		return newListOfCommits;
	}

	/** Number of distinct user ids among the given developers. */
	public int getNAuthors(Map<Integer, DeveloperInfo> repositoryDevelopers) {
		Set<Integer> userIds = new HashSet<Integer>();
		for (DeveloperInfo dev : repositoryDevelopers.values()) {
			userIds.add(dev.getUserId());
		}
		return userIds.size();
	}

	/**
	 * Groups commits by resolved developer id. Each developer's identity data
	 * is taken from the first commit seen for that id; every commit of the
	 * developer is then attached. Only ids present in {@code mapIds} that
	 * actually have commits appear in the result.
	 */
	public Map<Integer, DeveloperInfo> getRepositoryDevelopers(Map<String, LogCommitInfo> commits,
			Map<String, Integer> mapIds) {
		Map<Integer, DeveloperInfo> tempMap = new HashMap<Integer, DeveloperInfo>();
		for (LogCommitInfo commit : commits.values()) {
			Integer userId = mapIds.get(commit.getUserName());
			if (!tempMap.containsKey(userId)) {
				tempMap.put(userId, new DeveloperInfo(commit.getNormMainName(),
						commit.getNormMainEmail(), commit.getUserName(), userId));
			}
			tempMap.get(userId).addCommit(commit);
		}
		Map<Integer, DeveloperInfo> repositoryDevelopers = new HashMap<Integer, DeveloperInfo>();
		for (Entry<String, Integer> entry : mapIds.entrySet()) {
			if (tempMap.get(entry.getValue()) != null) {
				repositoryDevelopers.put(entry.getValue(), tempMap.get(entry.getValue()));
			}
		}
		return repositoryDevelopers;
	}

	/** Dumps each commit with both author and committer identity plus user name. */
	public void print(Map<String, LogCommitInfo> commits) {
		for (LogCommitInfo commit : commits.values()) {
			System.out.printf("%s;%s;%s;%s;%s;%s\n", commit.getSha(), commit.getAuthorName(),
					commit.getAuthorEmail(), commit.getCommitterName(),
					commit.getCommitterEmail(), commit.getUserName());
		}
	}

	/** Like {@link #print(Map)} but ends each line with the author id. */
	public void print2(Map<String, LogCommitInfo> commits) {
		for (LogCommitInfo commit : commits.values()) {
			System.out.printf("%s;%s;%s;%s;%s;%s\n", commit.getSha(), commit.getAuthorName(),
					commit.getAuthorEmail(), commit.getCommitterName(),
					commit.getCommitterEmail(), commit.getAuthorId());
		}
	}

	/**
	 * Runs {@code cmd} (tokens separated by single spaces) and returns its
	 * standard output.
	 */
	public String createAndExecuteCommand(String cmd) throws IOException, Exception {
		SystemCommandExecutor commandExecutor = createCommand(cmd);
		int result = commandExecutor.executeCommand();
		// Fixed: the exit status used to be silently discarded.
		if (result != 0) {
			LOGGER.warn("Command \"" + cmd + "\" exited with status " + result);
		}
		return commandExecutor.getStandardOutputFromCommand().toString();
	}

	/** Splits a command line on single spaces and wraps it in an executor. */
	private static SystemCommandExecutor createCommand(String cmd) {
		List<String> command = new ArrayList<String>();
		for (String str : cmd.split(" ")) {
			command.add(str);
		}
		return new SystemCommandExecutor(command);
	}
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2015 paqueloz
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.batmgr.filesystem;

import java.nio.file.attribute.FileTime;
import java.time.Instant;
import java.time.format.DateTimeParseException;

import javax.xml.bind.DatatypeConverter;

/**
 * Manage the details of a file. <br>
 * From / to directory index. An index line has the form
 * {@code SHA256;FLAGS;size;timestamp;name}.
 */
@SuppressWarnings("nls")
public class FileInfo {

    /** Bit set in {@link #flags} when the file has been removed. */
    private static final int FLAG_REMOVED = 1;

    /** Length of the SHA-256 hash, in bytes. */
    private static final int HASH_BYTES = 32;

    private boolean  initialized = false;
    private String   name;
    private long     size;
    private FileTime lastModif; // Round milliseconds down to 0
    private String   hash;      // SHA-256, hex encoded
    private int      flags;     // 4 hex digits (2 bytes)

    /**
     * Default constructor : the object exists but is not initialized
     */
    public FileInfo() {
    }

    /**
     * Fully-initializing constructor.
     *
     * @param name      file name
     * @param size      file size in bytes
     * @param lastModif last modification time (milliseconds are dropped)
     * @param hash      hex-encoded SHA-256 of the content
     * @param flags     16-bit status word
     * @throws IllegalArgumentException if flags is outside [0, 0xffff]
     */
    public FileInfo(String name, long size, FileTime lastModif, String hash, int flags) {
        this.name = name;
        this.size = size;
        this.lastModif = secondFileTime(lastModif);
        this.hash = hash;
        if (flags < 0 || flags > 0xffff) {
            throw new IllegalArgumentException(String.format("flags %X is not between 0 and 0xffff", flags));
        }
        this.flags = flags;
        initialized = true;
    }

    /**
     * Truncates a FileTime to whole seconds (drops the millisecond part).
     */
    public FileTime secondFileTime(FileTime lastModif) {
        long millis = lastModif.toMillis();
        long diff = millis % 1000;
        return FileTime.fromMillis(millis - diff);
    }

    /**
     * Parses one directory-index line.
     *
     * @param line structured as SHA256;0000;size;YYYY-MM-DDTHH:MM:SS;name
     * @throws IllegalArgumentException if any field is malformed
     */
    public FileInfo(String line) throws IllegalArgumentException {
        if (line == null) {
            throw new IllegalArgumentException("line must not be null");
        }
        String[] fields = line.split(";");
        if (fields.length < 5) {
            throw new IllegalArgumentException(String.format("line has %d fields instead of 5", fields.length));
        }
        // Decomposed into one helper per field; each throws with the same
        // message as the original monolithic constructor.
        hash = parseHash(fields[0]);
        flags = parseFlags(fields[1]);
        size = parseSize(fields[2]);
        lastModif = parseTimestamp(fields[3]);
        name = joinName(fields);
        initialized = true;
    }

    /** Validates a hex-encoded SHA-256 signature and returns it unchanged. */
    private static String parseHash(String field) {
        byte[] hex = null;
        try {
            hex = DatatypeConverter.parseHexBinary(field);
        } catch (IllegalArgumentException e) {
            // exception handled below
        }
        if (hex == null || hex.length != HASH_BYTES) {
            throw new IllegalArgumentException(String.format("%s is not a valid SHA-256 signature", field));
        }
        return field;
    }

    /** Parses a 4-hex-digit status word. */
    private static int parseFlags(String field) {
        int val = -1;
        try {
            val = Integer.parseInt(field, 16);
        } catch (NumberFormatException e) {
            // exception handled below
        }
        if (val == -1 || field.length() != 4) {
            throw new IllegalArgumentException(String.format("%s is not a valid 16 bit status", field));
        }
        return val;
    }

    /** Parses a decimal file size. */
    private static long parseSize(String field) {
        long len = -1;
        try {
            len = Long.parseLong(field);
        } catch (NumberFormatException e) {
            // exception handled below
        }
        if (len == -1) {
            throw new IllegalArgumentException(String.format("%s is not a valid size", field));
        }
        return len;
    }

    /** Parses an ISO-8601 instant into a FileTime. */
    private static FileTime parseTimestamp(String field) {
        Instant instant = null;
        try {
            instant = Instant.parse(field);
        } catch (DateTimeParseException e) {
            // exception handled below
        }
        if (instant == null) {
            throw new IllegalArgumentException(String.format("%s is not a valid timestamp", field));
        }
        return FileTime.from(instant);
    }

    /**
     * Re-joins fields[4..] with ";" — the file name itself may contain
     * semicolons, which split() broke apart.
     */
    private static String joinName(String[] fields) {
        // StringBuilder instead of StringBuffer: no synchronization needed here.
        StringBuilder finLigne = new StringBuilder(fields[4]);
        for (int i = 5; i < fields.length; i++) {
            finLigne.append(";").append(fields[i]);
        }
        if (finLigne.length() == 0) {
            throw new IllegalArgumentException(String.format("file name is empty"));
        }
        return finLigne.toString();
    }

    /**
     * @return the hex-encoded SHA-256 hash
     * @throws IllegalStateException if this object was never initialized
     */
    public String getHash() {
        if (!initialized) {
            throw new IllegalStateException(Constantes.OBJECT_NOT_INITIALIZED);
        }
        return hash;
    }

    /**
     * Serializes back to the index-line format parsed by
     * {@link #FileInfo(String)}.
     */
    @Override
    public String toString() {
        if (!initialized) {
            return Constantes.OBJECT_NOT_INITIALIZED;
        }
        return String.format("%s;%04X;%d;%s;%s", hash, flags, Long.valueOf(size), lastModif.toString(), name);
    }

    public String getName() {
        return name;
    }

    public long getSize() {
        return size;
    }

    /**
     * Formats a byte count using binary (1024-based) units, e.g. "1.5 KiB".
     * Values below 1024 are printed as plain bytes. Assumes a non-negative
     * argument — TODO confirm callers never pass a negative size.
     */
    public static String getHumanReadableSize(long bytes) {
        int unit = 1024;
        if (bytes < unit) {
            return bytes + " B";
        }
        int exp = (int) (Math.log(bytes) / Math.log(unit));
        String pre = "KMGTPE".charAt(exp - 1) + "i";
        return String.format("%.1f %sB", bytes / Math.pow(unit, exp), pre);
    }

    public FileTime getLastModif() {
        return lastModif;
    }

    /**
     * Compute the flags location
     * @param loc beginning of the entry
     * @return loc + appropriate offset (hex hash length plus one separator)
     */
    public long getFlagsLocation(Integer loc) {
        return loc + 2 * HASH_BYTES + 1;
    }

    /**
     * Change flags to mark file as removed
     */
    public void setRemovedFlag() {
        flags |= FLAG_REMOVED;
    }

    /**
     * @return true if the removed flag is set
     */
    public boolean isRemovedFlagSet() {
        return (flags & FLAG_REMOVED) != 0;
    }

    /**
     * Return the current value of the flags
     * @return 4 hex digits
     */
    public String getFlagsString() {
        return String.format("%04X", flags);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.commons.beanutils.locale;

import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.Locale;
import java.util.Map;
import java.util.WeakHashMap;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;

import org.apache.commons.beanutils.BeanUtilsTestCase;
import org.apache.commons.beanutils.ContextClassLoaderLocal;
import org.apache.commons.beanutils.ConversionException;
import org.apache.commons.beanutils.ConvertUtils;
import org.apache.commons.beanutils.PrimitiveBean;
import org.apache.commons.beanutils.locale.converters.LongLocaleConverter;
import org.apache.commons.logging.LogFactory;

/**
 * <p>
 * Test Case for changes made during LocaleBeanutils Beanification.
 * This is basically a cut-and-correct version of the beanutils beanifications tests.
 * </p>
 *
 * @version $Id$
 */
public class LocaleBeanificationTestCase extends TestCase {

    // ---------------------------------------------------- Constants

    /** Maximum number of iterations before our test fails */
    public static final int MAX_GC_ITERATIONS = 50;

    // ---------------------------------------------------- Instance Variables

    // ---------------------------------------------------------- Constructors

    /**
     * Construct a new instance of this test case.
     *
     * @param name Name of the test case
     */
    public LocaleBeanificationTestCase(final String name) {
        super(name);
    }

    // -------------------------------------------------- Overall Test Methods

    /**
     * Set up instance variables required by this test case.
     */
    @Override
    public void setUp() {
        // Reset registered converters so each test starts from a clean slate.
        LocaleConvertUtils.deregister();
    }

    /**
     * Return the tests included in this test suite.
     */
    public static Test suite() {
        return (new TestSuite(LocaleBeanificationTestCase.class));
    }

    /**
     * Tear down instance variables required by this test case.
     */
    @Override
    public void tearDown() {
        // No action required
    }

    // ------------------------------------------------ Individual Test Methods

    /**
     * Test of the methodology we'll use for some of the later tests.
     * Pattern: hold only a WeakReference to an object, drop all strong
     * references, then allocate growing garbage until GC clears the weak
     * reference (or fail after MAX_GC_ITERATIONS).
     */
    public void testMemoryTestMethodology() throws Exception {
        // test methodology
        // many thanks to Juozas Baliuka for suggesting this method
        ClassLoader loader = new ClassLoader(this.getClass().getClassLoader()) {};
        final WeakReference<ClassLoader> reference = new WeakReference<ClassLoader>(loader);
        Class<?> myClass = loader.loadClass("org.apache.commons.beanutils.BetaBean");

        assertNotNull("Weak reference released early", reference.get());

        // dereference class loader and class:
        loader = null;
        myClass = null;

        int iterations = 0;
        int bytz = 2; // doubling garbage allocation to pressure the GC
        while(true) {
            System.gc();
            if(iterations++ > MAX_GC_ITERATIONS){
                fail("Max iterations reached before resource released.");
            }
            if( reference.get() == null ) {
                break;
            } else {
                // create garbage:
                final byte[] b = new byte[bytz];
                bytz = bytz * 2;
            }
        }
    }

    /** Tests whether classloaders and beans are released from memory by the map used by beanutils */
    public void testMemoryLeak2() throws Exception {
        // tests when the map used by beanutils has the right behaviour
        if (BeanUtilsTestCase.isPre14JVM()) {
            System.out.println("WARNING: CANNOT TEST MEMORY LEAK ON PRE1.4 JVM");
            return;
        }

        // many thanks to Juozas Baliuka for suggesting this methodology
        TestClassLoader loader = new TestClassLoader();
        final ReferenceQueue<Object> queue = new ReferenceQueue<Object>();
        final WeakReference<ClassLoader> loaderReference = new WeakReference<ClassLoader>(loader, queue);
        Integer test = new Integer(1);

        final WeakReference<Integer> testReference = new WeakReference<Integer>(test, queue);
        //Map map = new ReferenceMap(ReferenceMap.WEAK, ReferenceMap.HARD, true);
        final Map<TestClassLoader, Integer> map = new WeakHashMap<TestClassLoader, Integer>();
        map.put(loader, test);

        assertEquals("In map", test, map.get(loader));
        assertNotNull("Weak reference released early (1)", loaderReference.get());
        assertNotNull("Weak reference released early (2)", testReference.get());

        // dereference strong references
        loader = null;
        test = null;

        int iterations = 0;
        int bytz = 2;
        while(true) {
            System.gc();
            if(iterations++ > MAX_GC_ITERATIONS){
                fail("Max iterations reached before resource released.");
            }
            // Touch the map so WeakHashMap expunges stale entries.
            map.isEmpty();
            if( loaderReference.get() == null && testReference.get() == null) {
                break;
            } else {
                // create garbage:
                final byte[] b = new byte[bytz];
                bytz = bytz * 2;
            }
        }
    }

    /** Tests whether classloaders and beans are released from memory */
    public void testMemoryLeak() throws Exception {
        if (BeanUtilsTestCase.isPre14JVM()) {
            System.out.println("WARNING: CANNOT TEST MEMORY LEAK ON PRE1.4 JVM");
            return;
        }

        // many thanks to Juozas Baliuka for suggesting this methodology
        TestClassLoader loader = new TestClassLoader();
        final WeakReference<TestClassLoader> loaderReference = new WeakReference<TestClassLoader>(loader);
        LocaleBeanUtilsBean.getLocaleBeanUtilsInstance();

        // Local thread class: fetches the per-classloader singletons while
        // running under the throwaway context classloader.
        class GetBeanUtilsBeanThread extends Thread {

            LocaleBeanUtilsBean beanUtils;
            LocaleConvertUtilsBean convertUtils;

            GetBeanUtilsBeanThread() {}

            @Override
            public void run() {
                beanUtils = LocaleBeanUtilsBean.getLocaleBeanUtilsInstance();
                convertUtils = LocaleConvertUtilsBean.getInstance();
                // XXX Log keeps a reference around!
                LogFactory.releaseAll();
            }

            @Override
            public String toString() {
                return "GetBeanUtilsBeanThread";
            }
        }

        GetBeanUtilsBeanThread thread = new GetBeanUtilsBeanThread();
        final WeakReference<GetBeanUtilsBeanThread> threadWeakReference = new WeakReference<GetBeanUtilsBeanThread>(thread);
        thread.setContextClassLoader(loader);

        thread.start();
        thread.join();

        final WeakReference<LocaleBeanUtilsBean> beanUtilsReference = new WeakReference<LocaleBeanUtilsBean>(thread.beanUtils);
        final WeakReference<LocaleConvertUtilsBean> convertUtilsReference = new WeakReference<LocaleConvertUtilsBean>(thread.convertUtils);

        assertNotNull("Weak reference released early (1)", loaderReference.get());
        assertNotNull("Weak reference released early (2)", beanUtilsReference.get());
        assertNotNull("Weak reference released early (4)", convertUtilsReference.get());

        // dereference strong references
        loader = null;
        thread.setContextClassLoader(null);
        thread = null;

        int iterations = 0;
        int bytz = 2;
        while(true) {
            LocaleBeanUtilsBean.getLocaleBeanUtilsInstance();
            System.gc();
            if(iterations++ > MAX_GC_ITERATIONS){
                fail("Max iterations reached before resource released.");
            }
            if(
                loaderReference.get() == null &&
                beanUtilsReference.get() == null &&
                convertUtilsReference.get() == null) {
                break;
            } else {
                // create garbage:
                final byte[] b = new byte[bytz];
                bytz = bytz * 2;
            }
        }
    }

    /**
     * Tests whether difference instances are loaded by different
     * context classloaders.
     */
    public void testGetByContextClassLoader() throws Exception {

        class GetBeanUtilsBeanThread extends Thread {

            private final Signal signal;

            GetBeanUtilsBeanThread(final Signal signal) {
                this.signal = signal;
            }

            @Override
            public void run() {
                signal.setSignal(2);
                signal.setBean(LocaleBeanUtilsBean.getLocaleBeanUtilsInstance());
                signal.setConvertUtils(LocaleConvertUtilsBean.getInstance());
            }

            @Override
            public String toString() {
                return "GetBeanUtilsBeanThread";
            }
        }

        final Signal signal = new Signal();
        signal.setSignal(1);

        final GetBeanUtilsBeanThread thread = new GetBeanUtilsBeanThread(signal);
        thread.setContextClassLoader(new TestClassLoader());

        thread.start();
        thread.join();

        assertEquals("Signal not set by test thread", 2, signal.getSignal());
        assertTrue(
                "Different LocaleBeanUtilsBean instances per context classloader",
                LocaleBeanUtilsBean.getInstance() != signal.getBean());
        assertTrue(
                "Different LocaleConvertUtilsBean instances per context classloader",
                LocaleConvertUtilsBean.getInstance() != signal.getConvertUtils());
    }

    /**
     * Tests whether difference instances are loaded by different
     * context classloaders.
     */
    public void testContextClassLoaderLocal() throws Exception {

        class CCLLTesterThread extends Thread {

            private final Signal signal;
            private final ContextClassLoaderLocal<Integer> ccll;

            CCLLTesterThread(final Signal signal, final ContextClassLoaderLocal<Integer> ccll) {
                this.signal = signal;
                this.ccll = ccll;
            }

            @Override
            public void run() {
                ccll.set(new Integer(1789));
                signal.setSignal(2);
                signal.setMarkerObject(ccll.get());
            }

            @Override
            public String toString() {
                return "CCLLTesterThread";
            }
        }

        final ContextClassLoaderLocal<Integer> ccll = new ContextClassLoaderLocal<Integer>();
        ccll.set(1776);
        assertEquals("Start thread sets value", new Integer(1776), ccll.get());

        final Signal signal = new Signal();
        signal.setSignal(1);

        final CCLLTesterThread thread = new CCLLTesterThread(signal, ccll);
        thread.setContextClassLoader(new TestClassLoader());

        thread.start();
        thread.join();

        assertEquals("Signal not set by test thread", 2, signal.getSignal());
        // The value set in the other thread must not leak into this one.
        assertEquals("Second thread preserves value", new Integer(1776), ccll.get());
        assertEquals("Second thread gets value it set", new Integer(1789), signal.getMarkerObject());
    }

    /** Tests whether calls are independent for different classloaders */
    public void testContextClassloaderIndependence() throws Exception {

        class TestIndependenceThread extends Thread {
            private final Signal signal;
            private final PrimitiveBean bean;

            TestIndependenceThread(final Signal signal, final PrimitiveBean bean) {
                this.signal = signal;
                this.bean = bean;
            }

            @Override
            public void run() {
                try {
                    signal.setSignal(3);
                    // Register a converter that always yields 9 — but only for
                    // this thread's context classloader.
                    LocaleConvertUtils.register(new LocaleConverter() {
                        public <T> T convert(final Class<T> type, final Object value) {
                            return ConvertUtils.primitiveToWrapper(type).cast(9);
                        }
                        public <T> T convert(final Class<T> type, final Object value, final String pattern) {
                            return ConvertUtils.primitiveToWrapper(type).cast(9);
                        }
                    }, Integer.TYPE, Locale.getDefault());
                    LocaleBeanUtils.setProperty(bean, "int", "1");
                } catch (final Exception e) {
                    e.printStackTrace();
                    signal.setException(e);
                }
            }

            @Override
            public String toString() {
                return "TestIndependenceThread";
            }
        }

        final PrimitiveBean bean = new PrimitiveBean();
        LocaleBeanUtils.setProperty(bean, "int", new Integer(1));
        assertEquals("Wrong property value (1)", 1, bean.getInt());

        // Main thread's converter always yields 5.
        LocaleConvertUtils.register(new LocaleConverter() {
            public <T> T convert(final Class<T> type, final Object value) {
                return ConvertUtils.primitiveToWrapper(type).cast(5);
            }
            public <T> T convert(final Class<T> type, final Object value, final String pattern) {
                return ConvertUtils.primitiveToWrapper(type).cast(5);
            }
        }, Integer.TYPE, Locale.getDefault());
        LocaleBeanUtils.setProperty(bean, "int", "1");
        assertEquals("Wrong property value(2)", 5, bean.getInt());

        final Signal signal = new Signal();
        signal.setSignal(1);

        final TestIndependenceThread thread = new TestIndependenceThread(signal, bean);
        thread.setContextClassLoader(new TestClassLoader());

        thread.start();
        thread.join();

        assertNull("Exception thrown by test thread:" + signal.getException(), signal.getException());
        assertEquals("Signal not set by test thread", 3, signal.getSignal());
        // The other thread's converter (9) applied, proving per-classloader isolation.
        assertEquals("Wrong property value(3)", 9, bean.getInt());
    }

    /** Tests whether different threads can set beanutils instances correctly */
    public void testBeanUtilsBeanSetInstance() throws Exception {

        class SetInstanceTesterThread extends Thread {

            private final Signal signal;
            private final LocaleBeanUtilsBean bean;

            SetInstanceTesterThread(final Signal signal, final LocaleBeanUtilsBean bean) {
                this.signal = signal;
                this.bean = bean;
            }

            @Override
            public void run() {
                LocaleBeanUtilsBean.setInstance(bean);
                signal.setSignal(21);
                signal.setBean(LocaleBeanUtilsBean.getLocaleBeanUtilsInstance());
            }

            @Override
            public String toString() {
                return "SetInstanceTesterThread";
            }
        }

        final Signal signal = new Signal();
        signal.setSignal(1);

        final LocaleBeanUtilsBean beanOne = new LocaleBeanUtilsBean();
        final LocaleBeanUtilsBean beanTwo = new LocaleBeanUtilsBean();

        final SetInstanceTesterThread thread = new SetInstanceTesterThread(signal, beanTwo);
        thread.setContextClassLoader(new TestClassLoader());

        LocaleBeanUtilsBean.setInstance(beanOne);
        assertEquals("Start thread gets right instance", beanOne, LocaleBeanUtilsBean.getLocaleBeanUtilsInstance());

        thread.start();
        thread.join();

        assertEquals("Signal not set by test thread", 21, signal.getSignal());
        assertEquals("Second thread preserves value", beanOne, LocaleBeanUtilsBean.getLocaleBeanUtilsInstance());
        assertEquals("Second thread gets value it set", beanTwo, signal.getBean());
    }

    /** Tests whether the unset method works*/
    public void testContextClassLoaderUnset() throws Exception {
        final LocaleBeanUtilsBean beanOne = new LocaleBeanUtilsBean();
        final ContextClassLoaderLocal<LocaleBeanUtilsBean> ccll = new ContextClassLoaderLocal<LocaleBeanUtilsBean>();
        ccll.set(beanOne);
        assertEquals("Start thread gets right instance", beanOne, ccll.get());
        ccll.unset();
        assertTrue("Unset works", !beanOne.equals(ccll.get()));
    }

    /**
     * Test registering a locale-aware converter with the standard ConvertUtils.
     */
    public void testLocaleAwareConverterInConvertUtils() throws Exception {
        try {
            // first use the default non-locale-aware converter
            try {
                final Long data = (Long) ConvertUtils.convert("777", Long.class);
                assertEquals("Standard format long converted ok", 777, data.longValue());
            } catch(final ConversionException ex) {
                fail("Unable to convert non-locale-aware number 777");
            }

            // now try default converter with special delimiters
            try {
                // This conversion will cause an error. But the default
                // Long converter is set up to return a default value of
                // zero on error.
                final Long data = (Long) ConvertUtils.convert("1.000.000", Long.class);
                assertEquals("Standard format behaved as expected", 0, data.longValue());
            } catch(final ConversionException ex) {
                fail("Unexpected exception from standard Long converter.");
            }

            // Now try using a locale-aware converter together with
            // locale-specific input string. Note that in the german locale,
            // large numbers can be split up into groups of three digits
            // using a dot character (and comma is the decimal-point indicator).
            try {
                final Locale germanLocale = Locale.GERMAN;
                final LongLocaleConverter longLocaleConverter = new LongLocaleConverter(germanLocale);
                ConvertUtils.register(longLocaleConverter, Long.class);

                final Long data = (Long) ConvertUtils.convert("1.000.000", Long.class);
                assertEquals("German-format long converted ok", 1000000, data.longValue());
            } catch(final ConversionException ex) {
                fail("Unable to convert german-format number");
            }
        } finally {
            // Always restore the default converters for later tests.
            ConvertUtils.deregister();
        }
    }

    // ---- Auxillary classes

    /** Throwaway context classloader used to trigger per-classloader instances. */
    class TestClassLoader extends ClassLoader {
        @Override
        public String toString() {
            return "TestClassLoader";
        }
    }

    /** Mutable holder used to pass results back from worker threads. */
    class Signal {
        private Exception e;
        private int signal = 0;
        private LocaleBeanUtilsBean bean;
        private LocaleConvertUtilsBean convertUtils;
        private Object marker;

        public Exception getException() {
            return e;
        }

        public void setException(final Exception e) {
            this.e = e;
        }

        public int getSignal() {
            return signal;
        }

        public void setSignal(final int signal) {
            this.signal = signal;
        }

        public Object getMarkerObject() {
            return marker;
        }

        public void setMarkerObject(final Object marker) {
            this.marker = marker;
        }

        public LocaleBeanUtilsBean getBean() {
            return bean;
        }

        public void setBean(final LocaleBeanUtilsBean bean) {
            this.bean = bean;
        }

        public LocaleConvertUtilsBean getConvertUtils() {
            return convertUtils;
        }

        public void setConvertUtils(final LocaleConvertUtilsBean convertUtils) {
            this.convertUtils = convertUtils;
        }
    }
}
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.framework.project;

import java.io.*;
import java.net.URL;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import ghidra.framework.GenericRunInfo;
import ghidra.framework.ToolUtils;
import ghidra.framework.client.*;
import ghidra.framework.data.TransientDataManager;
import ghidra.framework.model.*;
import ghidra.framework.preferences.Preferences;
import ghidra.framework.protocol.ghidra.GhidraURL;
import ghidra.framework.store.LockException;
import ghidra.util.*;
import ghidra.util.exception.NotFoundException;
import utilities.util.FileUtilities;

/**
 * Implementation for a ProjectManager; creates, opens,
 * and deletes Projects. It also keeps track of recently opened projects.
 */
public class DefaultProjectManager implements ProjectManager {

    /**
     * Preference name for the last opened project.
     */
    private final static String LAST_OPENED_PROJECT = "LastOpenedProject";

    private static final Logger LOG = LogManager.getLogger(DefaultProjectManager.class);

    // Preference keys for the persisted recent/viewed project lists and server info.
    private static final String RECENT_PROJECTS = "RecentProjects";
    private static final String VIEWED_PROJECTS = "ViewedProjects";
    private static final String SERVER_INFO = "ServerInfo";
    // Maximum number of entries kept in each recent-projects list.
    private static final int RECENT_PROJECTS_LIMIT = 6;
    // Separator used when joining project paths into a single preference string.
    private static String PROJECT_PATH_SEPARATOR = ";";

    private List<ProjectLocator> recentlyOpenedProjectsList;
    private List<URL> recentlyViewedProjectsList;
    private ToolChest userToolChest;
    private ServerInfo serverInfo;
    private ProjectLocator lastOpenedProject;
    private Project currentProject; // the single active project, or null if none is open

    /**
     * Construct the single project manager.
     */
    protected DefaultProjectManager() {
        recentlyOpenedProjectsList = new ArrayList<>();
        recentlyViewedProjectsList = new ArrayList<>();
        userToolChest = createUserToolChest();

        // get locator for last opened project
        lastOpenedProject = getLastOpenedProject();

        // read known projects from ghidra preferences...
        populateProjectLocatorList(recentlyOpenedProjectsList, RECENT_PROJECTS);
        populateProjectURLList(recentlyViewedProjectsList, VIEWED_PROJECTS);
        // Write the (possibly pruned) lists straight back so stale entries are dropped.
        updatePreferences();

        serverInfo = getServerInfo(Preferences.getProperty(SERVER_INFO));
    }

    @Override
    public Project getActiveProject() {
        return currentProject;
    }

    /**
     * Create a new project at the given location; only one project may be active at a time.
     *
     * @param projectLocator location for the new project
     * @param repAdapter shared repository adapter, or null for a non-shared project
     * @param remember true to record the project in the recent-projects preferences
     * @return the new active project, or null if a project is already open
     * @throws IOException if the parent directory is missing or the project is locked
     */
    @Override
    public Project createProject(ProjectLocator projectLocator, RepositoryAdapter repAdapter,
            boolean remember) throws IOException {
        if (currentProject != null) {
            Msg.error(this,
                "Current project must be closed before establishing a new active project");
            return null;
        }
        if (!projectLocator.getMarkerFile().getParentFile().isDirectory()) {
            throw new FileNotFoundException(
                "Directory not found: " + projectLocator.getMarkerFile().getParentFile());
        }
        try {
            currentProject = new DefaultProject(this, projectLocator, repAdapter);
        }
        catch (LockException e) {
            throw new IOException(e.getMessage());
        }
        if (remember) {
            addProjectToList(recentlyOpenedProjectsList, projectLocator);
            lastOpenedProject = projectLocator;
            updatePreferences();
        }
        return currentProject;
    }

    /**
     * Open an existing project; only one project may be active at a time.
     * A project whose files are missing is forgotten (removed from the recent list).
     *
     * @param projectLocator location of the project to open
     * @param doRestore true to restore the project's saved tool/window state
     * @param resetOwner true to reset the project owner to the current user
     * @return the opened project, or null on failure (errors are reported to the user)
     */
    @Override
    public Project openProject(ProjectLocator projectLocator, boolean doRestore, boolean resetOwner)
            throws NotFoundException, NotOwnerException, LockException {
        if (currentProject != null) {
            Msg.error(this,
                "Current project must be closed before establishing a new active project");
            return null;
        }
        if (!projectLocator.getMarkerFile().exists()) {
            forgetProject(projectLocator);
            throw new NotFoundException(
                "Project marker file not found: " + projectLocator.getMarkerFile());
        }
        if (!projectLocator.getProjectDir().isDirectory()) {
            forgetProject(projectLocator);
            throw new NotFoundException(
                "Project directory not found: " + projectLocator.getProjectDir());
        }
        try {
            currentProject = new DefaultProject(this, projectLocator, resetOwner);
            if (doRestore) {
                currentProject.restore();
            }

            // success
            addProjectToList(recentlyOpenedProjectsList, projectLocator);
            lastOpenedProject = projectLocator;
            updatePreferences();
            return currentProject;
        }
        catch (LockException e) {
            // NOTE(review): lock failure is silently converted to a null return even
            // though the method declares "throws LockException" — confirm intended.
            return null;
        }
        catch (ReadOnlyException e) {
            Msg.showError(LOG, null, "Read-only Project!",
                "Cannot open project for update: " + projectLocator);
        }
        catch (IOException e) {
            Msg.showError(LOG, null, "Open Project Failed!",
                "Could not open project " + projectLocator + "\n \nCAUSE: " + e.getMessage());
        }
        finally {
            // If the open failed and the directory is gone, drop the stale recent entry.
            if (currentProject == null) {
                File dirFile = projectLocator.getProjectDir();
                if (!dirFile.exists() || !dirFile.isDirectory()) {
                    forgetProject(projectLocator);
                }
            }
        }
        return null;
    }

    /**
     * Get list of project locations that user most recently opened.
     * @return list of project locations
     */
    @Override
    public ProjectLocator[] getRecentProjects() {
        ProjectLocator[] projectLocators = new ProjectLocator[recentlyOpenedProjectsList.size()];
        return recentlyOpenedProjectsList.toArray(projectLocators);
    }

    @Override
    public URL[] getRecentViewedProjects() {
        URL[] urls = new URL[recentlyViewedProjectsList.size()];
        return recentlyViewedProjectsList.toArray(urls);
    }

    /**
     * Get the last opened (active) project.
     * @return project last opened by the user; returns NULL if a project
     * was never opened OR the last opened project is no longer valid
     */
    @Override
    public ProjectLocator getLastOpenedProject() {
        String projectPath = Preferences.getProperty(LAST_OPENED_PROJECT);
        if (projectPath == null || projectPath.trim().length() == 0) {
            return null;
        }
        return getLocatorFromProjectPath(projectPath);
    }

    /**
     * Update the last opened project preference.
     */
    @Override
    public void setLastOpenedProject(ProjectLocator projectLocator) {
        Preferences.setProperty(LAST_OPENED_PROJECT,
            projectLocator != null ? projectLocator.toString() : null);
        Preferences.store();
    }

    /**
     * Delete the project in the given location and remove it from the list of known projects.
     *
     * @return false if no project was deleted.
     */
    @Override
    public boolean deleteProject(ProjectLocator projectLocator) {
        File dir = projectLocator.getProjectDir();
        File file = projectLocator.getMarkerFile();
        if (!dir.exists()) {
            throw new RuntimeException(file.getAbsolutePath() + " does not exist");
        }
        if (!dir.isDirectory()) {
            return false;
        }
        // Delete the project directory, then the marker file (which may not exist).
        boolean didDelete = (FileUtilities.deleteDir(dir) && (!file.exists() || file.delete()));
        forgetProject(projectLocator);
        return didDelete;
    }

    /**
     * Remove the specified project from the list of known projects.
     */
    private void forgetProject(ProjectLocator projectLocator) {
        if (projectLocator == null) {
            return;
        }
        if (projectLocator.equals(lastOpenedProject)) {
            lastOpenedProject = null;
        }
        recentlyOpenedProjectsList.remove(projectLocator);
        updatePreferences();
    }

    /**
     * Keep the specified project on the list of known projects.
     */
    @Override
    public void rememberProject(ProjectLocator projectLocator) {
        if (!recentlyOpenedProjectsList.contains(projectLocator)) {
            addProjectToList(recentlyOpenedProjectsList, projectLocator);
            updatePreferences();
        }
    }

    @Override
    public void forgetViewedProject(URL url) {
        if (url == null) {
            return;
        }
        recentlyViewedProjectsList.remove(url);
        updatePreferences();
    }

    @Override
    public void rememberViewedProject(URL url) {
        if (!recentlyViewedProjectsList.contains(url)) {
            // Most recently viewed goes first; trim the oldest when over the limit.
            recentlyViewedProjectsList.add(0, url);
            if (recentlyViewedProjectsList.size() > RECENT_PROJECTS_LIMIT) {
                recentlyViewedProjectsList.remove(recentlyViewedProjectsList.size() - 1);
            }
            updatePreferences();
        }
    }

    /**
     * Returns true if the specified project exists.
     */
    @Override
    public boolean projectExists(ProjectLocator projectLocator) {
        return projectLocator.getProjectDir().exists();
    }

    @Override
    public RepositoryServerAdapter getRepositoryServerAdapter(String host, int portNumber,
            boolean forceConnect) {
        RepositoryServerAdapter rsh =
            ClientUtil.getRepositoryServer(host, portNumber, forceConnect);
        // Remember the server so it can be restored from preferences next session.
        serverInfo = rsh.getServerInfo();
        updatePreferences();
        return rsh;
    }

    @Override
    public ServerInfo getMostRecentServerInfo() {
        return serverInfo;
    }

    /**
     * Add the default tools to the given tool chest. This method does not attempt to merge the
     * user's previous tools, as does {@link #installTools(ToolChest)}.
     *
     * @param toolChest tool chest which to add the default tools
     */
    public void addDefaultTools(ToolChest toolChest) {
        Set<ToolTemplate> tools = ToolUtils.getDefaultApplicationTools();
        if (tools == null || tools.isEmpty()) {
            Msg.showError(LOG, null, "Default Tools Not Found",
                "Could not find default tools for project.");
            return;
        }
        for (ToolTemplate template : tools) {
            addDefaultTool(toolChest, template);
        }
    }

    /**
     * Populate an empty tool chest: merge the default tools with any tools found in the
     * user's most recent application settings directory, preferring the user's versions.
     */
    private void installTools(ToolChest toolChest) {
        LOG.debug("No tools found; Installing default tools");

        File recoveryDirectory = getMostRecentValidProjectDirectory();
        if (recoveryDirectory == null) {
            LOG.debug("\tno recent project directories found");
            addDefaultTools(toolChest);
            return;
        }

        // get old tools
        Set<ToolTemplate> tools = ToolUtils.getDefaultApplicationTools();
        if (tools == null || tools.isEmpty()) {
            Msg.showError(LOG, null, "Default Tools Not Found",
                "Could not find default tools for project.");
            return;
        }

        // get the user's exiting tool, adding any default tools they don't have
        Set<ToolTemplate> preExistingUserTools = getPreExistingUserTools(recoveryDirectory);
        Collection<ToolTemplate> mergedTools =
            mergeDefaultToolsIntoExisting(tools, preExistingUserTools);
        for (ToolTemplate toolTemplate : mergedTools) {
            addDefaultTool(toolChest, toolTemplate);
        }
    }

    /**
     * Find the most recent previous application settings directory that contains a
     * tools sub-directory, or null if none exists.
     */
    private File getMostRecentValidProjectDirectory() {
        List<File> ghidraUserDirsByTime =
            GenericRunInfo.getPreviousApplicationSettingsDirsByTime();
        if (ghidraUserDirsByTime.size() == 0) {
            return null;
        }

        // get the tools from the most recent projects first
        for (File ghidraUserDir : ghidraUserDirsByTime) {
            File[] listFiles = ghidraUserDir.listFiles();
            if (listFiles == null) {
                // empty ghidra dir
                continue;
            }
            for (File ghidraDirSubFile : listFiles) {
                if (ghidraDirSubFile.getName().equals(APPLICATION_TOOLS_DIR_NAME)) {
                    return ghidraUserDir; // found a tools dir; move on
                }
            }
        }
        return null;
    }

    /**
     * Merge default and user tools by tool name; on a name collision the user's
     * tool wins.
     */
    private Collection<ToolTemplate> mergeDefaultToolsIntoExisting(Set<ToolTemplate> defaultTools,
            Set<ToolTemplate> userTools) {

        if (userTools.isEmpty()) {
            // no previous tools--use default tools
            return new HashSet<>(defaultTools);
        }

        LOG.debug("Found the following default tools: ");
        for (ToolTemplate tool : defaultTools) {
            LOG.debug("-" + tool);
        }

        LOG.debug("Found existing tools; merging existing tools: ");
        for (ToolTemplate tool : userTools) {
            LOG.debug("-" + tool);
        }

        //@formatter:off
        Map<String, ToolTemplate> allTools = new HashMap<>();
        Map<String, ToolTemplate> defaultMap = defaultTools.stream()
            .collect(Collectors.toMap(t -> t.getName(), Function.identity()))
            ;
        Map<String, ToolTemplate> userMap = userTools.stream()
            .collect(Collectors.toMap(t -> t.getName(), Function.identity()))
            ;

        allTools.putAll(defaultMap);
        allTools.putAll(userMap); // user tools last, overwriting the defaults; they are preferred
        //@formatter:on

        return allTools.values();
    }

    /**
     * Persist a tool template to the user's tools area.
     * @return URL of the written tool file, or null if it could not be written
     */
    private URL saveTool(ToolTemplate toolTemplate) throws Exception {
        if (!ToolUtils.writeToolTemplate(toolTemplate)) {
            return null;
        }

        File newFile = ToolUtils.getToolFile(toolTemplate.getName());
        if (newFile == null) {
            return null;
        }
        return newFile.toURI().toURL();
    }

    /* Gets tools from the user's last project */
    private Set<ToolTemplate> getPreExistingUserTools(File previousUserDir) {
        if (previousUserDir == null) {
            return Collections.emptySet();
        }

        FileFilter dirFilter =
            file -> file.isDirectory() && file.getName().equals(APPLICATION_TOOLS_DIR_NAME);
        File[] toolDirs = previousUserDir.listFiles(dirFilter);
        if (toolDirs == null || toolDirs.length != 1) {
            LOG.debug("No user tools found in '" + previousUserDir + "'");
            return Collections.emptySet();
        }

        File toolsDir = toolDirs[0];
        FileFilter filter = file -> file.getAbsolutePath().endsWith(APPLICATION_TOOL_EXTENSION);
        File[] toolFiles = toolsDir.listFiles(filter);
        Set<ToolTemplate> set = new HashSet<>();
        for (File toolFile : toolFiles) {
            ToolTemplate template = ToolUtils.readToolTemplate(toolFile);
            scrubUserTool(template);
            set.add(template);
        }

        return set;
    }

    /**
     * Remove plugins that no longer exist from an old tool and re-save it.
     */
    private void scrubUserTool(ToolTemplate template) {
        ToolUtils.removeInvalidPlugins(template);
        try {
            saveTool(template);
        }
        catch (Exception e) {
            Msg.error(LOG,
                "Unable to save user tool '" + template.getName() + "': " + e.getMessage(), e);
        }
    }

    @Override
    public ToolChest getUserToolChest() {
        return userToolChest;
    }

    private void addDefaultTool(ToolChest toolChest, ToolTemplate template) {

        // this implies that there exist multiple *default* tools with the same name, which
        // is an error condition.
        if (toolChest.getToolTemplate(template.getName()) != null) {
            Msg.showWarn(LOG, null, "Error Adding Tool",
                "Found multiple default tools with the same name: " + template.getName() +
                    ".\nCheck the classpath for " +
                    "entries that contain tools that share the same tool name");
        }

        // Note: we call replace here and not add, since we know that we want to put a new tool
        //       in by the given name.  At this point we can assume there are not yet any
        //       tools to overwrite, since this method is only called when no tools existed and
        //       we are adding the default set.
        toolChest.replaceToolTemplate(template);
    }

    /**
     * Create the user's tool chest, installing tools if it is empty.
     */
    protected ToolChest createUserToolChest() {
        ToolChest toolChest = new ToolChestImpl();
        try {
            if (toolChest.getToolCount() == 0) {
                installTools(toolChest);
            }
        }
        catch (Exception e) {
            Msg.showError(LOG, null, "Tool Chest Error", "Failed to create tool chest.", e);
        }
        return toolChest;
    }

    /**
     * Add the project to the given list;
     * most recently accessed projects are first in the list.
     */
    private boolean addProjectToList(List<ProjectLocator> list, ProjectLocator projectLocator) {
        // Only remember projects whose files actually exist on disk.
        File file = projectLocator.getMarkerFile();
        if (!file.exists()) {
            return false;
        }
        File dirFile = projectLocator.getProjectDir();
        if (!dirFile.exists()) {
            return false;
        }
        list.remove(projectLocator);
        list.add(0, projectLocator);
        if (list.size() > RECENT_PROJECTS_LIMIT) {
            list.remove(list.size() - 1);
        }
        return true;
    }

    /**
     * Parse a preference string into project locators, keeping only projects that
     * still exist, up to the recent-projects limit.
     */
    private void populateProjectLocatorList(List<ProjectLocator> list, String propertyName) {
        String projectNames = Preferences.getProperty(propertyName, null, true);
        if (projectNames == null) {
            return;
        }
        // TODO: fixed pathSeparator should be used to allow preferences to be more portable between platforms
        StringTokenizer st = new StringTokenizer(projectNames, PROJECT_PATH_SEPARATOR);
        while (st.hasMoreElements()) {
            String path = (String) st.nextElement();
            ProjectLocator projectLocator = getLocatorFromProjectPath(path);
            if (projectLocator != null) {
                list.add(projectLocator);
                if (list.size() == RECENT_PROJECTS_LIMIT) {
                    break;
                }
            }
        }
    }

    /**
     * Convert a project path string to a locator, returning null if the path is
     * invalid or the local project no longer exists.
     */
    private ProjectLocator getLocatorFromProjectPath(String path) {
        try {
            URL url = GhidraURL.toURL(path);
            if (GhidraURL.localProjectExists(url)) {
                return GhidraURL.getProjectStorageLocator(url);
            }
        }
        catch (IllegalArgumentException e) {
            Msg.error(this, "Invalid project path: " + path);
        }
        return null;
    }

    /**
     * Parse a preference string into project URLs; stale local-project URLs are skipped.
     */
    private void populateProjectURLList(List<URL> list, String propertyName) {
        String projectNames = Preferences.getProperty(propertyName, null, true);
        if (projectNames == null) {
            return;
        }
        StringTokenizer st = new StringTokenizer(projectNames, PROJECT_PATH_SEPARATOR);
        while (st.hasMoreElements()) {
            String urlStr = (String) st.nextElement();
            try {
                URL url = GhidraURL.toURL(urlStr);
                if (GhidraURL.isLocalProjectURL(url) && !GhidraURL.localProjectExists(url)) {
                    continue;
                }
                list.add(url);
                if (list.size() == RECENT_PROJECTS_LIMIT) {
                    break;
                }
            }
            catch (IllegalArgumentException e) {
                Msg.error(this, "Invalid project path/URL: " + urlStr);
            }
        }
    }

    /**
     * Update preferences file with list of known projects.
     */
    void updatePreferences() {
        setProjectLocatorProperty(recentlyOpenedProjectsList, RECENT_PROJECTS);
        setProjectURLProperty(recentlyViewedProjectsList, VIEWED_PROJECTS);
        if (serverInfo != null) {
            Preferences.setProperty(SERVER_INFO,
                serverInfo.getServerName() + ":" + serverInfo.getPortNumber());
        }
        Preferences.setProperty(LAST_OPENED_PROJECT,
            lastOpenedProject != null ? lastOpenedProject.toString() : null);
        Preferences.store();
    }

    /**
     * Join the locator list into a single separator-delimited preference value.
     */
    private void setProjectLocatorProperty(List<ProjectLocator> list, String propertyName) {
        StringBuffer sb = new StringBuffer();
        for (int i = 0; i < list.size(); i++) {
            ProjectLocator projectLocator = list.get(i);
            sb.append(projectLocator.toString());
            if (i < list.size() - 1) {
                sb.append(PROJECT_PATH_SEPARATOR);
            }
        }
        Preferences.setProperty(propertyName, sb.toString());
    }

    /**
     * Join the URL list into a single separator-delimited preference value.
     */
    private void setProjectURLProperty(List<URL> list, String propertyName) {
        StringBuffer sb = new StringBuffer();
        for (int i = 0; i < list.size(); i++) {
            URL url = list.get(i);
            sb.append(url.toExternalForm());
            if (i < list.size() - 1) {
                sb.append(PROJECT_PATH_SEPARATOR);
            }
        }
        Preferences.setProperty(propertyName, sb.toString());
    }

    /**
     * Parse a "host:port" preference string into a ServerInfo; returns null for
     * null, malformed, or non-numeric input.
     */
    private ServerInfo getServerInfo(String str) {
        if (str == null) {
            return null;
        }
        String host = null;
        String portStr = null;
        StringTokenizer st = new StringTokenizer(str, ":");
        while (st.hasMoreTokens()) {
            if (host == null) {
                host = st.nextToken();
            }
            else {
                portStr = st.nextToken();
            }
        }
        if (host != null && portStr != null) {
            try {
                return new ServerInfo(host, Integer.parseInt(portStr));
            }
            catch (NumberFormatException e) {
                // just return null below
            }
        }
        return null;
    }

    /**
     * Callback from a closing project: clears the active project reference and
     * any transient domain-object data.
     */
    void projectClosed(DefaultProject project) {
        if (project == currentProject) {
            currentProject = null;
        }
        TransientDataManager.clearAll();
    }
}
/* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package retrofacebook.app; import android.os.Bundle; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.NavigationView; import android.support.design.widget.Snackbar; import android.support.design.widget.TabLayout; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.GravityCompat; import android.support.v4.view.ViewPager; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBar; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Toast; import android.content.Intent; import java.util.ArrayList; import java.util.List; import rx.Observable; import rx.functions.*; import retrofacebook.*; import butterknife.InjectView; import butterknife.ButterKnife; /** * TODO */ public class MainActivity extends AppCompatActivity { private DrawerLayout mDrawerLayout; private Facebook facebook; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); ButterKnife.inject(this); Toolbar toolbar = (Toolbar) 
findViewById(R.id.toolbar); setSupportActionBar(toolbar); final ActionBar ab = getSupportActionBar(); ab.setHomeAsUpIndicator(R.drawable.ic_menu); ab.setDisplayHomeAsUpEnabled(true); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); if (navigationView != null) { setupDrawerContent(navigationView); } ViewPager viewPager = (ViewPager) findViewById(R.id.viewpager); if (viewPager != null) { setupViewPager(viewPager); } FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab); fab.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Snackbar.make(view, "Here's a Snackbar", Snackbar.LENGTH_LONG) .setAction("Action", null).show(); } }); TabLayout tabLayout = (TabLayout) findViewById(R.id.tabs); tabLayout.setupWithViewPager(viewPager); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.sample_actions, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case android.R.id.home: mDrawerLayout.openDrawer(GravityCompat.START); return true; } return super.onOptionsItemSelected(item); } private void setupViewPager(ViewPager viewPager) { Adapter adapter = new Adapter(getSupportFragmentManager()); setupAdapter(adapter); viewPager.setAdapter(adapter); } private void setupAdapter(Adapter adapter) { facebook = Facebook.create(this); adapter.fragments.add(FragmentPage.create().title("Photos").fragment(() -> { return RxCardsFragment.create().items(facebook.getUploadedPhotos().take(32) .doOnNext(photo -> { android.util.Log.d("RetroFacebook", "user: " + photo.from()); android.util.Log.d("RetroFacebook", "photo.caption: " + photo.caption()); }) .map(photo -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + photo.from().id() + "/picture?width=400&height=400"); card.text1 = 
Observable.just(photo.from().name()); card.message = Observable.just(photo.caption()); card.image = Observable.just(photo.images().get(0).source()); if (photo.comments() != null) { card.comments = Observable.from(photo.comments().data()); card.commentCount = card.comments.count(); } else { card.comments = facebook.getComments(photo.id()); card.commentCount = card.comments.count(); } card.like = photo.like(); card.unlike = photo.unlike(); if (photo.likes() != null) { card.likeCount = Observable.from(photo.likes().data()).count(); card.liked = facebook.me().concatMap(me -> { android.util.Log.d("RetroFacebook", "me: " + me.id()); return Observable.from(photo.likes().data()).filter(user -> user.id().equals(me.id())).isEmpty().map(b -> !b); }); } else { card.likeCount = facebook.getLikedUsers(photo).count(); card.liked = facebook.me().concatMap(me -> { android.util.Log.d("RetroFacebook", "me: " + me.id()); return facebook.getLikedUsers(photo).filter(user -> user.id().equals(me.id())).isEmpty().map(b -> !b); }); } card.onCommentText = comment -> { return facebook.comment(comment, photo); }; return card; })); })); adapter.fragments.add(FragmentPage.create().title("Friends").fragment(() -> { return ListFragment.create().items(facebook.getFriends().map(user -> { return Item.builder() .icon("http://graph.facebook.com/" + user.id() + "/picture?width=400&height=400") .text1(user.name()) .build(); })); })); adapter.fragments.add(FragmentPage.create().title("Posts").fragment(() -> { return RxCardsFragment.create().items(facebook.getPosts().take(32).map(post -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + post.from().id() + "/picture?width=400&height=400"); card.text1 = Observable.just(post.from().name()); card.message = Observable.just(post.message()); //card.image = Observable.just(post.picture()); card.image = post.photo().map(photo -> photo.images().get(0).source()); if (post.comments() != null) { card.comments = 
Observable.from(post.comments().data()); card.commentCount = card.comments.count(); } else { card.comments = facebook.getComments(post.id()); card.commentCount = card.comments.count(); } card.like = post.like(); card.unlike = post.unlike(); if (post.likes() != null) { card.likeCount = Observable.from(post.likes().data()).count(); card.liked = facebook.me().concatMap(me -> { android.util.Log.d("RetroFacebook", "me: " + me.id()); return Observable.from(post.likes().data()).filter(user -> user.id().equals(me.id())).isEmpty().map(b -> !b); }); } else { card.likeCount = facebook.getLikedUsers(post).count(); card.liked = facebook.me().concatMap(me -> { android.util.Log.d("RetroFacebook", "me: " + me.id()); return facebook.getLikedUsers(post).filter(user -> user.id().equals(me.id())).isEmpty().map(b -> !b); }); } card.onCommentText = comment -> { return facebook.comment(comment, post); }; return card; })); })); adapter.fragments.add(FragmentPage.create().title("Publish").fragment(() -> { return CardsFragment.create() .items(facebook.publish(Post.builder() .message("yo") .name("RetroFacebook") .caption("RetroFacebook") .description("Retrofit Facebook Android SDK") .picture("https://raw.githubusercontent.com/yongjhih/RetroFacebook/master/art/retrofacebook.png") .link("https://github.com/yongjhih/RetroFacebook") .build()) .map(response -> { return Card.builder() .text1(response.id()) .message(response.id()) .build(); })); })); adapter.fragments.add(FragmentPage.create().title("Albums").fragment(() -> { return RxCardsFragment.create().items(facebook.getAlbums().take(32).map(album -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + album.from().id() + "/picture?width=400&height=400"); card.text1 = Observable.just(album.name()); card.message = Observable.just(album.id()); //card.image = facebook.getAlbumThumbnail(album.id()).map(pic -> pic.data().url()); // FACEBOOK_NON_JSON_RESULT return card; })); })); 
adapter.fragments.add(FragmentPage.create().title("Family").fragment(() -> { return RxCardsFragment.create().items(facebook.getFamily().take(32).map(user -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + user.id() + "/picture?width=400&height=400"); card.text1 = Observable.just(user.name()); card.message = Observable.just(user.relationship()); return card; })); })); adapter.fragments.add(FragmentPage.create().title("Groups").fragment(() -> { return RxCardsFragment.create().items(facebook.getGroups().take(32).map(group -> { RxCard card = new RxCard(); card.icon = Observable.just(group.icon()); card.text1 = Observable.just(group.name()); card.message = Observable.just(group.description()); card.image = Observable.just(group.cover()).filter(c -> c != null).map(c -> c.source()); return card; })); })); adapter.fragments.add(FragmentPage.create().title("Notifications").fragment(() -> { return RxCardsFragment.create().items(facebook.getNotifications().take(32).map(notification -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + notification.from().id() + "/picture?width=400&height=400"); card.text1 = Observable.just(notification.title()); card.message = Observable.just(notification.title()); return card; })); })); adapter.fragments.add(FragmentPage.create().title("Scores").fragment(() -> { return RxCardsFragment.create().items(facebook.getScores().take(32).map(score -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + score.user().id() + "/picture?width=400&height=400"); card.text1 = Observable.just(score.application().name()); card.message = Observable.just("" + score.score()); return card; })); })); adapter.fragments.add(FragmentPage.create().title("Videos").fragment(() -> { return RxCardsFragment.create().items(facebook.getUploadedVideos().take(32).map(video -> { RxCard card = new RxCard(); card.icon = Observable.just("http://graph.facebook.com/" + 
video.from().id() + "/picture?width=400&height=400"); card.text1 = Observable.just(video.name()); card.message = Observable.just(video.description() + video.source()); card.image = Observable.just(video.picture()); return card; })); })); /* {FacebookServiceException: httpResponseCode: 400, facebookErrorCode: 15, facebookErrorType: OAuthException, message: (#15) This method must be called with an app access_token.} adapter.fragments.add(FragmentPage.create().fragment(() -> { return CardsFragment.create() .items(facebook.searchTopic("clinton") .map(p -> { return Card.builder() .text1(p.id()) .message(p.name()) .build(); })); }).title("Search")); // oauth adapter.fragments.add(FragmentPage.create().title("Mark Elliot Zuckerberg's photos").fragment(() -> { return CardsFragment.create() .items( facebook.getPhotos("4") .take(32) .doOnNext(photo -> { android.util.Log.d("RetroFacebook", "user: " + photo.from()); android.util.Log.d("RetroFacebook", "photo.caption: " + photo.caption()); }) .doOnCompleted(() -> { }) .map(photo -> { return Card.builder() .icon("http://graph.facebook.com/" + photo.from().id() + "/picture?width=400&height=400") .text1(photo.from().name()) .message(photo.caption()) .image(photo.images().get(0).source()) .build(); })); })); adapter.fragments.add(FragmentPage.create().title("Mark Elliot Zuckerberg's posts").fragment(() -> { return CardsFragment.create() .items(facebook.getPosts("4").take(32).map(post -> { return Card.builder() .icon("http://graph.facebook.com/" + post.from().id() + "/picture?width=400&height=400") .text1(post.from().name()) .message(post.message()) .build(); })); })); */ //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 4")); //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 5")); //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 6")); 
        //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 7"));
        //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 8"));
        //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 9"));
        //adapter.fragments.add(FragmentPage.create().fragment(() -> new CheeseListFragment()).title("Category 10"));
        //adapter.notifyDataSetChanged();
    }

    /**
     * Wires the navigation drawer: tapping an item marks it checked and closes
     * the drawer. No actual navigation is performed here.
     *
     * @param navigationView the drawer's navigation view
     */
    private void setupDrawerContent(NavigationView navigationView) {
        navigationView.setNavigationItemSelectedListener(
                new NavigationView.OnNavigationItemSelectedListener() {
                    @Override
                    public boolean onNavigationItemSelected(MenuItem menuItem) {
                        menuItem.setChecked(true);
                        mDrawerLayout.closeDrawers();
                        return true;
                    }
                });
    }

    /**
     * Pairs a page title with a lazily created {@link Fragment}. The factory
     * supplied via {@code fragment(Func0)} is invoked at most once; the created
     * Fragment is cached and returned on every subsequent {@code fragment()} call.
     */
    static class FragmentPage {
        // Factory invoked on the first fragment() call.
        Func0<Fragment> onFragment;
        // Cached instance; null until fragment() is first called.
        Fragment fragment;
        String title;

        // Lazily creates (once) and returns the page's Fragment.
        public Fragment fragment() {
            if (fragment == null) fragment = onFragment.call();
            return fragment;
        }

        public String title() {
            return title;
        }

        // Builder-style setter for the fragment factory.
        public FragmentPage fragment(Func0<Fragment> onFragment) {
            this.onFragment = onFragment;
            return this;
        }

        // Builder-style setter for the page title.
        public FragmentPage title(String title) {
            this.title = title;
            return this;
        }

        public static FragmentPage create() {
            return new FragmentPage();
        }
    }

    /**
     * ViewPager adapter backed by a mutable list of {@link FragmentPage}s.
     * NOTE(review): the public {@code fragments} list keeps a strong reference to
     * every created Fragment for the adapter's lifetime — see the memleak notice.
     */
    static class Adapter extends FragmentPagerAdapter {
        public List<FragmentPage> fragments = new ArrayList<>(); // NOTICE: memleak

        public Adapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int position) {
            return fragments.get(position).fragment();
        }

        @Override
        public int getCount() {
            return fragments.size();
        }

        @Override
        public CharSequence getPageTitle(int position) {
            return fragments.get(position).title();
        }

        //@Override
        //public int getItemPosition(Object object) {
        //return FragmentPagerAdapter.POSITION_NONE;
        //}
    }

    // Forwards activity results (e.g. the Facebook login flow) to the SDK wrapper.
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        facebook.onActivityResult(requestCode, resultCode, data);
    }
}
/* * Copyright (C) 2013-2014 Suresh Mahalingam. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. **/ package org.aredis.util.concurrent; import java.util.ArrayList; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import org.aredis.cache.AsyncRedisConnection; /** * <p> * This is a Wrapper over a {@link LinkedBlockingQueue} for use with a Single Consumer. This is used by the internal request and * response Q's of {@link AsyncRedisConnection}. The Consumer Thread can optionally quit on an empty Q in which case * the Q is marked as IDLE. The writer has an option to acquire the Q if it is IDLE when writing to the head of the Q. 
* If the Q is acquired then then the writer should initiate the listener. * </p> * * <p> * Note: Only One consumer should be receiving for a SingleConsumerQueue. This can be ensured by using add * method to acquire an idle consumer status and initiate the consumer and the remove method to set back the consumer * status to idle and quit the consumer thread in case of an empty Q. * </p> * * @author Suresh * * @param <E> Type of items in the Q */ public class SingleConsumerQueue<E> { /** * Callback Interface to get called back when the IDLE status of the Q changes. * @author Suresh * * @param <E> */ public static interface IdleListener<E> { /** * Gets called before the Q is marked idle when releasing a Q. The call is from the consumer thread. * @param q The Q */ void beforeIdle(SingleConsumerQueue<E> q); /** * Gets called after the Q is marked active when acquiring an idle Q. The call is from the writer thread. * @param q The Q */ void afterAcquireIdle(SingleConsumerQueue<E> q); } public static enum QueueStatus{ACTIVE, IDLE}; /** * Default value of maxEnqueTimeMillis. */ public static final int DEFAULT_MAX_ENQUE_TIME_MILLIS = 10; private static final int FETCH_SIZE = 256; private BlockingQueue<E> q; private int lindex; private List<E> l; private volatile QueueStatus status; private IdleListener<E> idleListener; private int capacity; private long maxEnqueTimeMillis; /** * Creates a Q with the given capacity and maxEnqueTimeMillis to wait for when adding to the Q in case the Q is * full. 
* @param pcapacity Q capacity or 0 for unbounded * @param pmaxEnqueTimeMillis Max Milliseconds to wait when adding to Q if the Q is full */ public SingleConsumerQueue(int pcapacity, long pmaxEnqueTimeMillis) { if(pcapacity < 0) { throw new IllegalArgumentException("Q capacity cannot be negative"); } if(pmaxEnqueTimeMillis < 0) { throw new IllegalArgumentException("Max Enque time cannot be negative"); } q = new LinkedBlockingQueue<E>(); l = new ArrayList<E>(FETCH_SIZE); status = QueueStatus.IDLE; capacity = pcapacity; maxEnqueTimeMillis = pmaxEnqueTimeMillis; } /** * Creates a Q with the given capacity and default maxEnqueTimeMillis of 5 seconds * @param pcapacity Q capacity or 0 for unbounded */ public SingleConsumerQueue(int pcapacity) { this(pcapacity, DEFAULT_MAX_ENQUE_TIME_MILLIS); } /** * Creates a Q with unbounded capacity. */ public SingleConsumerQueue() { this(0, DEFAULT_MAX_ENQUE_TIME_MILLIS); } /** * Adds an item to the head of the Q. * @param e item to add * @param acquireIfIdle pass true to acquire the Q if currently idle. When used concurrently exactly one of the * callers gets the return value as true. * @return true if an idle Q has been acquired and marked active in which case the caller should start the Consumer * thread. Always returs false if acquireIfIdle has been passed as false. */ public boolean add(E e, boolean acquireIfIdle) { String msg = null; if(capacity == 0 || maxEnqueTimeMillis == 0) { if (!q.offer(e)) { msg = "Queue Full"; } } else { try { if(!q.offer(e, maxEnqueTimeMillis, TimeUnit.MILLISECONDS)) { msg = "Queue Full after " + maxEnqueTimeMillis + " ms"; } } catch (InterruptedException e1) { msg = "add Operation Interrupted"; } } if(msg != null) { throw new IllegalStateException(msg); } return acquireIfIdle && acquireIdle(); } /** * Remove from the tail of the Q. 
* @param timeoutMicros Max microseconds to wait for in case the Q is empty * @param releaseIfEmpty If true then the Q is marked idle if the Q is empty and this method returns null. * @return Item or null if the Q is empty */ public E remove(long timeoutMicros, boolean releaseIfEmpty) { int len = l.size(); E e = null; if(lindex >= len) { l.clear(); q.drainTo(l, FETCH_SIZE); len = l.size(); lindex = 0; if(len == 0 && timeoutMicros > 0) { try { e = q.poll(timeoutMicros, TimeUnit.MICROSECONDS); } catch (InterruptedException ie) { } } } if(len > 0) { e = l.get(lindex); lindex++; } if(releaseIfEmpty && e == null) { boolean ensuredEmptyOrAvail; do { ensuredEmptyOrAvail = true; IdleListener<E> listener = idleListener; if(listener != null) { listener.beforeIdle(this); } status = QueueStatus.IDLE; if(q.size() > 0 && acquireIdle()) { e = remove(0, false); // In the previous code the e == null was considered impossible and threw // an IllegalStateException. However it is possible if after the size check // and before acquiring the idle Q another thread acquired dequeued 1 item // successfully and then released it on attempting another remove in which // case acquireIdle will succeed but the Q will be empty if(e == null) { ensuredEmptyOrAvail = false; } } } while (!ensuredEmptyOrAvail); } return e; } /** * Push to head of Q. Non concurrent method to be used only by consumer. * @param e Item */ public void push(E e) { if(lindex > 0) { lindex--; l.set(lindex, e); } else { l.add(lindex, e); } } /** * Unconditionally mark the Q idle. The caller should be holding the Q. Normally there should * be a double check after this call to ensure that the idle condition continues to hold which * usually is that the Q continues to be empty. If not the processing action should be initiated * by trying to acquire back the idle Q. 
*/ public void markIdle() { IdleListener<E> listener = idleListener; if(listener != null) { listener.beforeIdle(this); } status = QueueStatus.IDLE; } /** * Acquires an idle Q and marks it as active. * @return true if the Q is idle and has been acquired by this thread */ public boolean acquireIdle() { boolean acquired = false; if(status == QueueStatus.IDLE) { synchronized(this) { if(status == QueueStatus.IDLE) { status = QueueStatus.ACTIVE; acquired = true; } } } if(acquired) { IdleListener<E> listener = idleListener; if(listener != null) { listener.afterAcquireIdle(this); } } return acquired; } /** * Gets the size of the Q. * @return Q size */ public int size() { return q.size() + l.size(); } /** * Gets the idle listener in use. * @return The idle listener or null if not set */ public IdleListener<E> getIdleListener() { return idleListener; } /** * Sets an idle listener which is called whenever the Q status changes from IDLE to ACTIVE or vice-versa. * @param pidleListener Idle listener or null to unset the current listener */ public void setIdleListener(IdleListener<E> pidleListener) { idleListener = pidleListener; } }
/** * Copyright 2015 LinkedIn Corp. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. */ package metadata.etl.dataset.druid; import java.sql.DriverManager; import java.sql.Statement; import java.util.Properties; import java.sql.Connection; import org.hsqldb.persist.ScriptRunner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import wherehows.common.Constant; public class DruidMetadataLoader { private String JDBC_URL; private String JDBC_DRIVER; private String JDBC_USERNAME; private String JDBC_PASSWORD; private String DB_ID; private String WH_ETL_EXEC_ID; private String druid_ds_metadata_csv_file; private String druid_col_metadata_csv_file; protected final Logger logger = LoggerFactory.getLogger(getClass()); private String DROP_DS_METADATA = "DROP TABLE IF EXISTS wherehows.druid_ds_metadata_tbl; \n"; private String CREATE_DS_METADAT = "CREATE TABLE wherehows.druid_ds_metadata_tbl " + "(" + "id INT AUTO_INCREMENT PRIMARY KEY," + "name VARCHAR(100) NOT NULL," + "schema_desc VARCHAR(10000)," + "schema_type VARCHAR(20)," + "properties VARCHAR(500)," + "fields VARCHAR(10000)," + "urn VARCHAR(500)," + "source VARCHAR(50)," + "storage_type VARCHAR(20)," + "is_partitioned VARCHAR(20)" + ") ENGINE = InnoDB DEFAULT CHARSET = latin1; \n "; private String LOAD_DS_METADATA = "LOAD DATA LOCAL INFILE \"" + "$DRUID_DS_METADATA_CSV_FILE" + "\"" + " INTO TABLE wherehows.druid_ds_metadata_tbl " + "FIELDS TERMINATED BY '\\t' " + "LINES TERMINATED BY '\\n'" + "(@name, @schema_desc, @schema_type, @properties, @fields, @urn, @source, @storage_type, @is_partitioned)" + "SET 
name=@name," + "schema_desc=@schema_desc," + "schema_type=@schema_type," + "properties=@properties," + "fields=@fields," + "urn=@urn," + "source=@source," + "storage_type=@storage_type," + "is_partitioned=@is_partitioned" + "\n;"; private String DELETE_STG_DATASET_META = "DELETE FROM wherehows.stg_dict_dataset WHERE db_id = $DB_ID;\n"; private String LOAD_STG_DATASET_META = "INSERT INTO wherehows.stg_dict_dataset" + "(db_id, name, `schema`, schema_type, properties, fields, urn, source," + "location_prefix, parent_name, storage_type, dataset_type, is_partitioned, " + "source_created_time, source_modified_time, wh_etl_exec_id)" + "SELECT " + "$DB_ID, d.name, schema_desc, schema_type, properties, fields, urn, 'Druid'," + "NULL, NULL, 'TABLE', 'DRUID', is_partitioned, " + "UNIX_TIMESTAMP(STR_TO_DATE( substring_index(substring_index(properties, '/', 1), '.', 1), '%Y-%m-%dT%TZ')) , " + "UNIX_TIMESTAMP(STR_TO_DATE( substring_index(substring_index(properties, '/', -1), '.', 1), '%Y-%m-%dT%TZ')), " + "$WH_ETL_EXEC_ID " + "FROM druid_ds_metadata_tbl d, " + "(SELECT MAX(id) id, name FROM wherehows.druid_ds_metadata_tbl GROUP BY name) t " + "WHERE t.id=d.id"; private String DUMP_DS_METADATA = "INSERT INTO wherehows.dict_dataset " + "(" + "name, `schema`, schema_type, properties, fields, urn, source, " + "location_prefix, parent_name, storage_type, ref_dataset_id, " + "dataset_type, is_partitioned, " + "partition_layout_pattern_id, sample_partition_full_path, " + "source_created_time, source_modified_time, created_time, modified_time, wh_etl_exec_id, is_active " + ")" + "SELECT " + "name, `schema`, schema_type, properties, fields, urn, source, " + "location_prefix, parent_name, storage_type, ref_dataset_id, " + "dataset_type, is_partitioned, " + "partition_layout_pattern_id, sample_partition_full_path, " + "source_created_time, source_modified_time, created_time, UNIX_TIMESTAMP(now()), wh_etl_exec_id, 1 " + "FROM stg_dict_dataset s\n" + "WHERE db_id = $DB_ID " + "ON DUPLICATE 
KEY UPDATE " + "name = s.name, `schema`=s.schema, schema_type=s.schema_type, fields=s.fields, " + "properties=s.properties, source=s.source, location_prefix=s.location_prefix, " + "parent_name=s.parent_name, storage_type=s.storage_type, ref_dataset_id=s.ref_dataset_id, " + "dataset_type=s.dataset_type, hive_serdes_class=s.hive_serdes_class, " + "is_partitioned=s.is_partitioned, partition_layout_pattern_id = s.partition_layout_pattern_id, " + "sample_partition_full_path=s.sample_partition_full_path, source_created_time=s.source_created_time, " + "source_modified_time=s.source_modified_time, modified_time=UNIX_TIMESTAMP(NOW()), wh_etl_exec_id=s.wh_etl_exec_id;"; private String DROP_COL_METADATA = "DROP TABLE IF EXISTS wherehows.druid_col_metadata_tbl; \n"; private String CREATE_COL_METADAT = "CREATE TABLE wherehows.druid_col_metadata_tbl " + "(" + "id INT AUTO_INCREMENT PRIMARY KEY," + "ds_name VARCHAR(100) NOT NULL," + "urn VARCHAR(500) NOT NULL," + "properties VARCHAR(200)," + "field_name VARCHAR(100) NOT NULL," + "data_type VARCHAR(200)," + "data_size VARCHAR(50)," + "cardinality VARCHAR(50)," + "error_message VARCHAR(500)" + ") ENGINE = InnoDB DEFAULT CHARSET = latin1; \n "; private String LOAD_COL_METADATA = "LOAD DATA LOCAL INFILE \"" + "$DRUID_COL_METADATA_CSV_FILE" + "\"" + " INTO TABLE wherehows.druid_col_metadata_tbl " + "FIELDS TERMINATED BY '\\t' " + "LINES TERMINATED BY '\\n'" + "(@ds_name, @urn, @properties, @field_name, @data_type, @data_size, @cardinality, @error_message)" + "SET ds_name=@ds_name," + "urn=@urn," + "properties=@properties," + "field_name=@field_name," + "data_type=@data_type," + "data_size=@data_size," + "cardinality=@cardinality," + "error_message=@error_message" + "\n;"; private String DELETE_STG_FIELD_META = "DELETE FROM wherehows.stg_dict_field_detail WHERE db_id = $DB_ID"; private String LOAD_STG_FIELD_META = "INSERT INTO stg_dict_field_detail" + "(" + "db_id, urn, sort_id, parent_sort_id,field_name, " + "data_type, data_size, " + 
"is_nullable, is_indexed, is_partitioned, " + "namespace, last_modified" + ") " + "SELECT " + "$DB_ID, urn, id, 1, field_name, " + "data_type, data_size," + "'F', 'T', 'T'," + "ds_name, NOW() " + "FROM druid_col_metadata_tbl " + ";\n"; private String UPDATE_STG_FIELD_META = "UPDATE stg_dict_field_detail sf, dict_dataset d " + "SET sf.dataset_id=d.id WHERE sf.urn=d.urn " + "AND sf.db_id = $DB_ID;\n"; private String DUMP_NON_EXIST_FIELD = "CREATE TEMPORARY TABLE IF NOT EXISTS t_deleted_fields (primary key (field_id))\n" + "SELECT x.field_id " + "FROM stg_dict_field_detail s " + "join dict_dataset i " + "ON s.urn=i.urn AND s.db_id=$DB_ID " + "RIGHT JOIN dict_field_detail x " + "ON i.id=x.dataset_id AND s.field_name = x.field_name " + "WHERE s.field_name IS NULL " + "AND x.dataset_id IN " + "(" + "SELECT d.id dataset_id FROM stg_dict_field_detail k join dict_dataset d ON k.urn=d.urn AND k.db_id=$DB_ID" + ");\n"; private String DELETE_NON_EXIST_FIELD = "DELETE FROM dict_field_detail WHERE field_id IN (SELECT field_id FROM t_deleted_fields)\n;"; private String UPDATE_FIELD_METADATA = "UPDATE dict_field_detail t join " + "(" + "select x.field_id, s.* " + "from stg_dict_field_detail s join dict_field_detail x " + "on s.field_name=x.field_name and s.namespace=x.namespace and s.dataset_id=x.dataset_id " + "where s.db_id=$DB_ID " + "and ( " + "x.data_type<>s.data_type " + "or x.data_size<>s.data_size " + ") " + ") p " + "on t.field_id=p.field_id " + "set t.data_type=p.data_type, " + "t.data_size=p.data_size, " + "t.modified = NOW();\n"; private String INSERT_NEW_FIELD_METADATA = "INSERT IGNORE INTO dict_field_detail" + "(" + "dataset_id, sort_id, parent_sort_id, " + "field_name, namespace, data_type, data_size, " + "is_nullable, is_indexed, is_partitioned, is_distributed," + "modified" + ")" + "SELECT " + "dataset_id, sort_id, parent_sort_id, " + "field_name, namespace, data_type, data_size, " + "is_nullable, is_indexed, is_partitioned, is_distributed, " + "last_modified " + 
"FROM stg_dict_field_detail sf " + "WHERE sf.db_id = $DB_ID AND sf.dataset_id is not NULL " + "AND (sf.urn, sf.db_id) not in (select urn, db_id from t_deleted_fields)" + "AND (sf.field_name, sf.dataset_id, namespace) not in (select field_name, dataset_id, namespace from dict_field_detail)" + ";\n"; public DruidMetadataLoader(Properties prop) throws Exception { druid_ds_metadata_csv_file = prop.getProperty(Constant.DRUID_DATASOURCE_METADATA_CSV_FILE); druid_col_metadata_csv_file = prop.getProperty(Constant.DRUID_FIELD_METADATA_CSV_FILE); JDBC_URL = prop.getProperty(Constant.WH_DB_URL_KEY); JDBC_DRIVER = prop.getProperty(Constant.WH_DB_DRIVER_KEY); JDBC_USERNAME = prop.getProperty(Constant.WH_DB_USERNAME_KEY); JDBC_PASSWORD = prop.getProperty(Constant.WH_DB_PASSWORD_KEY); DB_ID = prop.getProperty(Constant.JOB_REF_ID_KEY); WH_ETL_EXEC_ID = prop.getProperty(Constant.WH_EXEC_ID_KEY); logger.debug("druid_ds_metadata_csv_file=" + druid_ds_metadata_csv_file); logger.debug("druid_col_metadata_csv_file=" + druid_col_metadata_csv_file); logger.debug("JDBC_URL=" + JDBC_URL); logger.debug("JDBC_DRIVER=" + JDBC_DRIVER); logger.debug("JDBC_USERNAME=" + JDBC_USERNAME); logger.debug("JDBC_PASSWORD=" + JDBC_PASSWORD); logger.debug("DB_ID=" + DB_ID); logger.debug("WH_ETL_EXEC_ID=" + WH_ETL_EXEC_ID); } public DruidMetadataLoader(String ds_csv_file, String col_csv_file, String db_id, String exec_id, String url, String driver, String usr, String pwd) throws Exception { if (ds_csv_file == null || ds_csv_file.length() == 0) { throw new Exception("CSV file is invalid"); } if (url == null || url.length() == 0) { throw new Exception("JDBC URL is invalid"); } if (driver == null || driver.length() == 0) { throw new Exception("JDBC Driver is invalid"); } if (usr == null || usr.length() == 0) { throw new Exception("JDBC Username is invalid"); } if (pwd == null || pwd.length() == 0) { throw new Exception("JDBC Password is invalid"); } druid_ds_metadata_csv_file = ds_csv_file; 
druid_col_metadata_csv_file = col_csv_file; DB_ID = db_id; JDBC_URL = url; JDBC_DRIVER = driver; JDBC_USERNAME = usr; JDBC_PASSWORD = pwd; WH_ETL_EXEC_ID = exec_id; } public void run() throws Exception { loadDatasourceMetadata(); } public void loadDatasourceMetadata() throws Exception { Class.forName(JDBC_DRIVER); Connection conn = DriverManager.getConnection(JDBC_URL, JDBC_USERNAME, JDBC_PASSWORD); Statement stmt = conn.createStatement(); logger.info("Load file " + druid_ds_metadata_csv_file + " to table 'dict_dataset'"); logger.debug("Running query:" + DROP_DS_METADATA); stmt.executeUpdate(DROP_DS_METADATA); logger.debug("Running query:" + CREATE_DS_METADAT); stmt.executeUpdate(CREATE_DS_METADAT); logger.debug( "Running query:" + LOAD_DS_METADATA.replace("$DRUID_DS_METADATA_CSV_FILE", druid_ds_metadata_csv_file)); stmt.executeUpdate(LOAD_DS_METADATA.replace("$DRUID_DS_METADATA_CSV_FILE", druid_ds_metadata_csv_file)); logger.debug("Running query:" + DELETE_STG_DATASET_META.replace("$DB_ID", DB_ID)); stmt.executeUpdate(DELETE_STG_DATASET_META.replace("$DB_ID", DB_ID)); logger.debug( "Running query:" + LOAD_STG_DATASET_META.replace("$DB_ID", DB_ID).replace("$WH_ETL_EXEC_ID", WH_ETL_EXEC_ID)); stmt.executeUpdate(LOAD_STG_DATASET_META.replace("$DB_ID", DB_ID).replace("$WH_ETL_EXEC_ID", WH_ETL_EXEC_ID)); logger.debug("Running query:" + DUMP_DS_METADATA.replace("$DB_ID", DB_ID)); stmt.executeUpdate(DUMP_DS_METADATA.replace("$DB_ID", DB_ID)); logger.info("Load file " + druid_col_metadata_csv_file + " to table 'dict_field'"); logger.debug("Running query:" + DROP_COL_METADATA); stmt.executeUpdate(DROP_COL_METADATA); logger.debug("Running query:" + CREATE_COL_METADAT); stmt.executeUpdate(CREATE_COL_METADAT); logger.debug( "Running query:" + LOAD_COL_METADATA.replace("$DRUID_COL_METADATA_CSV_FILE", druid_col_metadata_csv_file)); stmt.executeUpdate(LOAD_COL_METADATA.replace("$DRUID_COL_METADATA_CSV_FILE", druid_col_metadata_csv_file)); logger.debug("Running query:" + 
DELETE_STG_FIELD_META.replace("$DB_ID", DB_ID)); stmt.executeUpdate(DELETE_STG_FIELD_META.replace("$DB_ID", DB_ID)); logger.debug("Running query:" + LOAD_STG_FIELD_META.replace("$DB_ID", DB_ID)); stmt.executeUpdate(LOAD_STG_FIELD_META.replace("$DB_ID", DB_ID)); logger.debug("Running query:" + UPDATE_STG_FIELD_META.replace("$DB_ID", DB_ID)); stmt.executeUpdate(UPDATE_STG_FIELD_META.replace("$DB_ID", DB_ID)); logger.debug("Running query:" + DUMP_DS_METADATA.replace("$DB_ID", DB_ID)); stmt.executeUpdate(DUMP_NON_EXIST_FIELD.replace("$DB_ID", DB_ID)); logger.debug("Running query:" + DELETE_NON_EXIST_FIELD); stmt.executeUpdate(DELETE_NON_EXIST_FIELD); logger.debug("Running query:" + UPDATE_FIELD_METADATA.replace("$DB_ID", DB_ID)); stmt.executeUpdate(UPDATE_FIELD_METADATA.replace("$DB_ID", DB_ID)); logger.debug("Running query:" + INSERT_NEW_FIELD_METADATA.replace("$DB_ID", DB_ID)); stmt.executeUpdate(INSERT_NEW_FIELD_METADATA.replace("$DB_ID", DB_ID)); stmt.close(); } }
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package org.tmf.dsmapi.common.impl; import java.text.NumberFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.AbstractMap; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.List; import java.util.Map; import java.util.logging.Level; import java.util.logging.Logger; import javax.persistence.EntityManager; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Expression; import javax.persistence.criteria.Path; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.ws.rs.core.MultivaluedMap; import org.tmf.dsmapi.common.exceptions.BadUsageException; import org.tmf.dsmapi.common.exceptions.ExceptionType; /** * * @author pierregauthier */ public abstract class AbstractFacade<T> { private Class<T> entityClass; private static final String pattern = "yyyy-MM-dd'T'HH:mm:ssZ"; private static SimpleDateFormat formatter = new SimpleDateFormat(pattern); public AbstractFacade(Class<T> entityClass) { this.entityClass = entityClass; } protected abstract EntityManager getEntityManager(); public void create(T entity) { getEntityManager().persist(entity); } public int create(List<T> entities) { for (T entity : entities) { this.create(entity); } return entities.size(); } public void edit(T entity) { getEntityManager().merge(entity); } public void remove(T entity) { getEntityManager().remove(getEntityManager().merge(entity)); } public T find(Object id) { return getEntityManager().find(entityClass, id); } public void detach(T entity) { getEntityManager().detach(entity); } public void clear() { getEntityManager().clear(); } public List<T> findAll() { javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); 
cq.select(cq.from(entityClass)); return getEntityManager().createQuery(cq).getResultList(); } public List<T> findRange(int[] range) { javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); cq.select(cq.from(entityClass)); javax.persistence.Query q = getEntityManager().createQuery(cq); q.setMaxResults(range[1] - range[0]); q.setFirstResult(range[0]); return q.getResultList(); } public int count() { javax.persistence.criteria.CriteriaQuery cq = getEntityManager().getCriteriaBuilder().createQuery(); javax.persistence.criteria.Root<T> rt = cq.from(entityClass); cq.select(getEntityManager().getCriteriaBuilder().count(rt)); javax.persistence.Query q = getEntityManager().createQuery(cq); return ((Long) q.getSingleResult()).intValue(); } public List<T> findByCriteria(MultivaluedMap<String, String> map, Class<T> clazz) { List<T> resultsList = null; try { CriteriaBuilder criteriaBuilder = getEntityManager().getCriteriaBuilder(); CriteriaQuery<T> cq = criteriaBuilder.createQuery(clazz); List<Predicate> andPredicates = new ArrayList<Predicate>(); Root<T> tt = cq.from(clazz); for (Map.Entry<String, List<String>> entry : map.entrySet()) { List<String> valueList = entry.getValue(); Predicate predicate = null; if (valueList.size() > 1) { // name=value1&name=value2&...&name=valueN // value of name is list [value1, value2, ..., valueN] // => name=value1 OR name=value2 OR ... 
OR name=valueN List<Predicate> orPredicates = new ArrayList<Predicate>(); for (String currentValue : valueList) { Predicate orPredicate = buildPredicate(tt, entry.getKey(), currentValue); orPredicates.add(orPredicate); } predicate = criteriaBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])); } else { // name=value // value of name is one element list [value] // => name=value predicate = buildPredicate(tt, entry.getKey(), valueList.get(0)); } andPredicates.add(predicate); } cq.where(andPredicates.toArray(new Predicate[andPredicates.size()])); cq.select(tt); TypedQuery<T> q = getEntityManager().createQuery(cq); resultsList = q.getResultList(); return resultsList; } catch (Exception ex) { Logger.getLogger(AbstractFacade.class.getName()).log(Level.INFO, "findByCriteria error, null result returned", ex); return null; } } private Predicate buildPredicate(Path<T> tt, String name, String value) throws BadUsageException { Predicate predicate = null; int index = name.indexOf('.'); if (index > 0 && index < name.length()) { // nested format : rootFieldName.subFieldName=value String rootFieldName = name.substring(0, index); String subFieldName = name.substring(index + 1); Path<T> root = tt.get(rootFieldName); predicate = buildPredicate(root, subFieldName, value); } else { // simple format : name=value predicate = buildSimplePredicate(tt, name, value); } return predicate; } private Predicate buildSimplePredicate(Path<T> tt, String name, String value) throws BadUsageException { Predicate predicate; CriteriaBuilder criteriaBuilder = getEntityManager().getCriteriaBuilder(); if (isMultipleAndValue(value)) { // name=(subname1=value1&subname2=value2&...&subnameN=valueN) // => name.subname1=value1 AND name.subname2=value2 AND ... 
AND name.subnameN=valueN List<Map.Entry<String, String>> subFieldNameValue = convertMultipleAndValue(value); List<Predicate> andPredicates = new ArrayList<Predicate>(); Path<T> root = tt.get(name); for (Map.Entry<String, String> entry : subFieldNameValue) { String currentsubFieldName = entry.getKey(); String currentValue = entry.getValue(); Predicate andPredicate = buildPredicate(root, currentsubFieldName, currentValue); andPredicates.add(andPredicate); } predicate = criteriaBuilder.and(andPredicates.toArray(new Predicate[andPredicates.size()])); } else if (isMultipleOrValue(value)) { // name=value1,value2,...,valueN // => name=value1 OR name=value2 OR ... OR name=valueN List<String> valueList = convertMultipleOrValueToList(value); List<Predicate> orPredicates = new ArrayList<Predicate>(); for (String currentValue : valueList) { Predicate orPredicate = buildPredicateWithOperator(tt, name, currentValue); orPredicates.add(orPredicate); } predicate = criteriaBuilder.or(orPredicates.toArray(new Predicate[orPredicates.size()])); } else { // name=value predicate = buildPredicateWithOperator(tt, name, value); } return predicate; } // value has format value1,value2,...,valueN private static boolean isMultipleOrValue(String value) { return (value.indexOf(",") > -1); } // value has format (value1&value2&...&valueN) private static boolean isMultipleAndValue(String value) { return (value.startsWith("(") && value.endsWith(")")); } // convert String "value1,value2,...,valueN" // to List [value1, value2, ..., valueN] private static List<String> convertMultipleOrValueToList(String value) { List<String> valueList = new ArrayList<String>(); String[] tokenArray = value.split(","); valueList.addAll(Arrays.asList(tokenArray)); return valueList; } // convert String "(name1=value1&name2=value2&...&nameN=valueN)" // to List of Entry [name1=value1, name2=value2, ..., nameN=valueN] // Conversion is not to a Map since there can be a same name with differents values private static 
/**
 * Parses a composite search value of the form {@code (name1=value1&name2=value2)}
 * into an ordered list of name/value pairs.
 * <p>
 * Input that is not wrapped in parentheses yields an empty list; tokens that do
 * not contain exactly one {@code '='} are silently skipped.
 *
 * @param multipleValue the raw composite value, e.g. {@code "(gt=10&lt=20)"}
 * @return the parsed pairs, never {@code null}
 */
List<Map.Entry<String, String>> convertMultipleAndValue(String multipleValue) {
    List<Map.Entry<String, String>> nameValueList = new ArrayList<Map.Entry<String, String>>();
    if (multipleValue.startsWith("(") && multipleValue.endsWith(")")) {
        String[] tokenArray = multipleValue.substring(1, multipleValue.length() - 1).split("&");
        for (String nameValue : tokenArray) {
            String[] split = nameValue.split("=");
            if (split.length == 2) {
                nameValueList.add(new AbstractMap.SimpleEntry<String, String>(split[0], split[1]));
            }
        }
    }
    return nameValueList;
}

/**
 * {@link Enum#valueOf} variant that never throws: returns {@code null} when
 * {@code name} is {@code null} or is not a constant of {@code enumType}.
 */
private Enum safeEnumValueOf(Class enumType, String name) {
    Enum enumValue = null;
    if (name != null) {
        try {
            enumValue = Enum.valueOf(enumType, name);
        } catch (Exception e) {
            // Unknown constant name (or non-enum class): treat as "no match".
            enumValue = null;
        }
    }
    return enumValue;
}

/**
 * Converts a query-string value to an instance of the target type: enums via
 * {@link #safeEnumValueOf}, dates via the configured {@code formatter},
 * numeric primitives and {@link Number} subclasses via {@link NumberFormat}
 * (NOTE: NumberFormat.getInstance() parses with the default locale — TODO
 * confirm callers expect locale-dependent number parsing), and anything else
 * is returned as the raw string.
 *
 * @param value the raw string value from the query
 * @param clazz the target attribute type
 * @return the converted value, never {@code null}
 * @throws BadUsageException if the value cannot be parsed as the target type
 */
private Object convertStringValueToObject(String value, Class clazz) throws BadUsageException {
    Object convertedValue = null;
    if (clazz.isEnum()) {
        convertedValue = safeEnumValueOf(clazz, value);
    } else if (Date.class.isAssignableFrom(clazz)) {
        try {
            convertedValue = formatter.parse(value);
        } catch (ParseException ex) {
            convertedValue = null;
        }
    } else if ((clazz.isPrimitive() && !clazz.equals(boolean.class))
            || (Number.class.isAssignableFrom(clazz))) {
        try {
            convertedValue = NumberFormat.getInstance().parse(value);
        } catch (ParseException ex) {
            convertedValue = null;
        }
    } else {
        convertedValue = value;
    }
    if (convertedValue != null) {
        return convertedValue;
    }
    throw new BadUsageException(ExceptionType.BAD_USAGE_FORMAT, "Wrong format for value " + value);
}

/**
 * Checks type/operator compatibility:
 * <ul>
 *   <li>{@code =} and {@code <>} ({@code EQ}/{@code NE}) are compatible with all types;</li>
 *   <li>{@code <}, {@code >}, {@code <=}, {@code >=} only with numbers and dates;</li>
 *   <li>{@code LIKE} ({@code EX}) only with {@link String}.</li>
 * </ul>
 * A {@code null} operator (plain equality) is always compatible.
 */
private boolean classCompatibleWithOperator(Class clazz, Operator operator) {
    if (operator == null) {
        return true;
    }
    switch (operator) {
        case NE:
        case EQ:
            return true;
        case GT:
        case GTE:
        case LT:
        case LTE:
            return (Date.class.isAssignableFrom(clazz)
                    || (clazz.isPrimitive() && !clazz.equals(boolean.class))
                    || Number.class.isAssignableFrom(clazz));
        case EX:
            return String.class.equals(clazz);
        default:
            return false;
    }
}

/**
 * Builds a JPA criteria {@link Predicate} for the given path.
 * <p>
 * When {@code name} is not a recognized operator keyword it is treated as an
 * attribute name and a plain equality predicate on that attribute is produced;
 * otherwise the operator is applied to the path itself.
 * <p>
 * Fix: removed a leftover debug {@code System.out.println} that logged every
 * predicate (name and value) to stdout on the equality path.
 *
 * @param tt    the criteria path the predicate applies to
 * @param name  an operator keyword ("eq", "gt", ...) or an attribute name
 * @param value the raw comparison value from the query string
 * @return the predicate, never {@code null}
 * @throws BadUsageException if the operator is incompatible with the field
 *         type or the value cannot be converted
 */
protected Predicate buildPredicateWithOperator(Path<T> tt, String name, String value) throws BadUsageException {
    CriteriaBuilder criteriaBuilder = getEntityManager().getCriteriaBuilder();
    Operator operator = Operator.fromString(name);
    // Default operation (no operator keyword) is equality on the named attribute.
    if (operator == null) {
        Path<T> attribute = tt.get(name);
        Object valueObject = convertStringValueToObject(value, attribute.getJavaType());
        return criteriaBuilder.equal(attribute, valueObject);
    }
    Class javaType = tt.getJavaType();
    if (!classCompatibleWithOperator(javaType, operator)) {
        throw new BadUsageException(ExceptionType.BAD_USAGE_OPERATOR,
                operator.getValue() + " operator incompatible with field");
    }
    Object valueObject = convertStringValueToObject(value, javaType);
    switch (operator) {
        case GT:
            return criteriaBuilder.greaterThan((Expression) tt, (Comparable) valueObject);
        case GTE:
            return criteriaBuilder.greaterThanOrEqualTo((Expression) tt, (Comparable) valueObject);
        case LT:
            return criteriaBuilder.lessThan((Expression) tt, (Comparable) valueObject);
        case LTE:
            return criteriaBuilder.lessThanOrEqualTo((Expression) tt, (Comparable) valueObject);
        case NE:
            return criteriaBuilder.notEqual(tt, valueObject);
        case EQ:
            return criteriaBuilder.equal(tt, valueObject);
        case EX:
            // '*' is the public wildcard character; SQL LIKE uses '%'.
            return criteriaBuilder.like((Expression) tt, value.replace('*', '%'));
        default: {
            // Defensive fallback: treat unknown operators as plain equality.
            Path<T> attribute = tt.get(name);
            valueObject = convertStringValueToObject(value, attribute.getJavaType());
            return criteriaBuilder.equal(attribute, valueObject);
        }
    }
}

/** Comparison operators accepted in search expressions ("eq", "gt", "gte", ...). */
enum Operator {
    EQ("eq"), GT("gt"), GTE("gte"), LT("lt"), LTE("lte"), NE("ne"), EX("ex");

    private String value;

    Operator(String value) {
        this.value = value;
    }

    /** @return the lower-case keyword used in query strings */
    public String getValue() {
        return this.value;
    }

    /** Case-insensitive keyword lookup; returns {@code null} for unknown keywords. */
    public static Operator fromString(String value) {
        if (value != null) {
            for (Operator b : Operator.values()) {
                if (value.equalsIgnoreCase(b.value)) {
                    return b;
                }
            }
        }
        return null;
    }
}
}
/*
 * Copyright (C) The Apache Software Foundation. All rights reserved.
 *
 * This software is published under the terms of the Apache Software
 * License version 1.1, a copy of which has been included with this
 * distribution in the LICENSE.APL file.
 */

// Although located under org/apache/log4j/test, org.apache.log4j is
// the correct package (the tests need package-private access).
package org.apache.log4j;

import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;

import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.framework.TestFailure;
import junit.framework.Test;
import org.apache.log4j.*;

/**
 * JUnit 3 tests for {@code DailyRollingFileAppender}'s rollover scheduling:
 * pattern-to-period detection ({@code computeCheckPeriod}) and next-rollover
 * computation ({@code RollingCalendar.getNextCheckDate}) across month/day/hour
 * boundaries and daylight-saving transitions in the JVM's default time zone.
 */
public class UnitTestDRFA extends TestCase {

  public UnitTestDRFA(String name) {
    super(name);
  }

  /**
   * Verifies that each date pattern maps to the expected rollover period
   * (day / minute / half-day / hour / month), including a pattern where the
   * significant field is embedded between quoted literals.
   */
  public void testComputeCheckPeriod() {
    DailyRollingFileAppender drfa = new DailyRollingFileAppender();
    drfa.setName("testComputeCheckPeriod");

    drfa.setDatePattern("yyyy-MM-dd.'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.TOP_OF_DAY);

    drfa.setDatePattern("yyyy-MM-dd mm.'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.TOP_OF_MINUTE);

    drfa.setDatePattern("yyyy-MM-dd a.'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.HALF_DAY);

    drfa.setDatePattern("yyyy-MM-dd HH.'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.TOP_OF_HOUR);

    drfa.setDatePattern("yyyy-MM.'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.TOP_OF_MONTH);

    drfa.setDatePattern("'log'HH'log'");
    assertEquals(drfa.computeCheckPeriod(), DailyRollingFileAppender.TOP_OF_HOUR);
  }

  /**
   * TOP_OF_DAY rollover: for every hour of every day of each 31-day month,
   * the next check date must be midnight of the following day (rolling the
   * month when the day is the 31st).
   */
  public void testRC1() {
    RollingCalendar rc = new RollingCalendar();
    rc.setType(DailyRollingFileAppender.TOP_OF_DAY);

    Calendar c = Calendar.getInstance();

    // jan, mar, may, july, aug, oct, dec have 31 days
    int [] M31 = {0,2,4,6,7,9,11};

    for(int i = 0; i < M31.length; i ++) {
      for(int d = 1; d <=31; d++) {
        // NOTE(review): "h < 23" skips hour 23, so the final hour of each day
        // is never exercised here — possibly an off-by-one; left unchanged.
        for(int h = 0; h < 23; h++) {
          c.clear();
          c.set(Calendar.YEAR, 20);
          c.set(Calendar.MONTH, Calendar.JANUARY + M31[i]);
          c.set(Calendar.DAY_OF_MONTH, d);
          c.set(Calendar.HOUR_OF_DAY, h);
          c.set(Calendar.MINUTE, 10);
          c.set(Calendar.SECOND, 10);
          c.set(Calendar.MILLISECOND, 88);

          c.setTime(rc.getNextCheckDate(c.getTime()));
          if(d == 31) {
            // Last day of a 31-day month: expect the 1st of the next month.
            assertEquals(c.get(Calendar.MONTH),(Calendar.JANUARY+M31[i]+1)%12);
            assertEquals(c.get(Calendar.DAY_OF_MONTH), 1);
          } else {
            assertEquals(c.get(Calendar.MONTH), Calendar.JANUARY+M31[i]);
            assertEquals(c.get(Calendar.DAY_OF_MONTH), d+1);
          }
          // Rollover always lands exactly on midnight.
          assertEquals(c.get(Calendar.HOUR_OF_DAY), 0);
          assertEquals(c.get(Calendar.MINUTE), 0);
          assertEquals(c.get(Calendar.SECOND), 0);
          assertEquals(c.get(Calendar.MILLISECOND), 0);
        }
      }
    }
  }

  /**
   * TOP_OF_HOUR rollover: the next check date must be the top of the
   * following hour, with special-casing around daylight-saving transitions
   * (hour may stay the same when falling back to standard time).
   */
  public void testRC2() {
    RollingCalendar rc = new RollingCalendar();
    rc.setType(DailyRollingFileAppender.TOP_OF_HOUR);

    Calendar c = Calendar.getInstance();
    TimeZone tz = c.getTimeZone();

    // jan, mar, may, july, aug, oct, dec have 31 days
    int [] M31 = {0,2,4,6,7,9,11};

    for(int i = 0; i < M31.length; i ++) {
      System.out.println("Month = "+(M31[i]+1));
      for(int d = 1; d <= 31; d++) {
        // NOTE(review): as in testRC1, hour 23 is never tested (h < 23),
        // which makes the "if(h == 23)" day-rollover branch below dead code.
        for(int h = 0; h < 23; h++) {
          for(int m = 0; m <= 59; m++) {
            c.clear();
            c.set(Calendar.YEAR, 20);
            c.set(Calendar.MONTH, Calendar.JANUARY + M31[i]);
            c.set(Calendar.DAY_OF_MONTH, d);
            c.set(Calendar.HOUR_OF_DAY, h);
            c.set(Calendar.MINUTE, m);
            c.set(Calendar.SECOND, 12);
            c.set(Calendar.MILLISECOND, 88);

            // Record DST state before and after the rollover computation.
            boolean dltState0 = c.getTimeZone().inDaylightTime(c.getTime());
            c.setTime(rc.getNextCheckDate(c.getTime()));
            boolean dltState1 = c.getTimeZone().inDaylightTime(c.getTime());

            assertEquals(c.get(Calendar.MILLISECOND), 0);
            assertEquals(c.get(Calendar.SECOND), 0);
            assertEquals(c.get(Calendar.MINUTE), 0);

            if(dltState0 == dltState1) {
              // No DST transition crossed: plain next hour.
              assertEquals(c.get(Calendar.HOUR_OF_DAY), (h+1)%24);
            } else {
              // returning to standard time
              if(dltState0) {
                assertEquals(c.get(Calendar.HOUR_OF_DAY), h);
              } else {
                // switching to day light saving time
                //System.err.println("m="+m+", h="+h+", d="+d+", i="+i);
                //if(h==2) {
                //  System.err.println(c);
                //}
                //assertEquals(c.get(Calendar.HOUR_OF_DAY), (h+2)%24);
              }
            }

            if(h == 23) {
              assertEquals(c.get(Calendar.DAY_OF_MONTH), (d+1)%32);
              if(d == 31) {
                assertEquals(c.get(Calendar.MONTH), (Calendar.JANUARY+M31[i]+1)%12);
              } else {
                assertEquals(c.get(Calendar.MONTH), Calendar.JANUARY+M31[i]);
              }
            } else {
              assertEquals(c.get(Calendar.DAY_OF_MONTH), d);
              assertEquals(c.get(Calendar.MONTH), Calendar.JANUARY+M31[i]);
            }
          }
        }
      }
    }
  }

  /**
   * TOP_OF_MINUTE rollover over sampled minutes/seconds: the next check date
   * must be the top of the following minute; DST switches are allowed to
   * shift the hour, which is validated explicitly when detected.
   */
  public void testRC3() {
    RollingCalendar rc = new RollingCalendar();
    rc.setType(DailyRollingFileAppender.TOP_OF_MINUTE);

    // Sampled second and minute values (full 0..59 sweep would be too slow).
    int[] S = {0, 1, 5, 10, 21, 30, 59};
    int[] M = {0, 1, 5, 10, 21, 30, 59};
    Calendar c = Calendar.getInstance();

    // jan, mar, may, july, aug, oct, dec have 31 days
    int [] M31 = {2,9,0,4,6,7,11};

    for(int i = 0; i < M31.length; i ++) {
      System.out.println("Month = "+(M31[i]+1));
      for(int d = 1; d <= 31; d++) {
        // NOTE(review): hour 23 skipped here too (h < 23); see testRC1.
        for(int h = 0; h < 23; h++) {
          for(int m = 0; m < M.length; m++) {
            for(int s = 0; s < S.length; s++) {
              c.clear();
              c.set(Calendar.YEAR, 20);
              c.set(Calendar.MONTH, Calendar.JANUARY + M31[i]);
              c.set(Calendar.DAY_OF_MONTH, d);
              c.set(Calendar.HOUR_OF_DAY, h);
              c.set(Calendar.MINUTE, M[m]);
              c.set(Calendar.SECOND, S[s]);
              c.set(Calendar.MILLISECOND, 88);
              c.add(Calendar.MILLISECOND, 1);

              boolean dltState0 = c.getTimeZone().inDaylightTime(c.getTime());
              c.setTime(rc.getNextCheckDate(c.getTime()));
              c.add(Calendar.MILLISECOND, 0);
              boolean dltState1 = c.getTimeZone().inDaylightTime(c.getTime());

              assertEquals(c.get(Calendar.MILLISECOND), 0);
              assertEquals(c.get(Calendar.SECOND), 0);
              assertEquals(c.get(Calendar.MINUTE), (M[m]+1)%60);

              if(M[m] == 59) {
                // Minute 59 rolls the hour (and possibly day/month).
                if(dltState0 == dltState1) {
                  assertEquals(c.get(Calendar.HOUR_OF_DAY), (h+1)%24);
                }
                if(h == 23) {
                  assertEquals(c.get(Calendar.DAY_OF_MONTH), (d+1)%32);
                  if(d == 31) {
                    assertEquals(c.get(Calendar.MONTH), (Calendar.JANUARY+M31[i]+1)%12);
                  } else {
                    assertEquals(c.get(Calendar.MONTH), Calendar.JANUARY+M31[i]);
                  }
                } else {
                  assertEquals(c.get(Calendar.DAY_OF_MONTH), d);
                }
              } else {
                // allow discrepancies only if we are switching from std to dls time
                if(c.get(Calendar.HOUR_OF_DAY) != h) {
                  c.add(Calendar.HOUR_OF_DAY, +1);
                  boolean dltState2 = c.getTimeZone().inDaylightTime(c.getTime());
                  if(dltState1 == dltState2) {
                    fail("No switch");
                  }
                }
                assertEquals(c.get(Calendar.DAY_OF_MONTH), d);
                assertEquals(c.get(Calendar.MONTH), Calendar.JANUARY+M31[i]);
              }
            }
          }
        }
      }
    }
  }

  /** Builds the suite; testRC2 is deliberately excluded (left commented out). */
  public static Test suite() {
    TestSuite suite = new TestSuite();
    suite.addTest(new UnitTestDRFA("testComputeCheckPeriod"));
    suite.addTest(new UnitTestDRFA("testRC1"));
    //suite.addTest(new UnitTestDRFA("testRC2"));
    suite.addTest(new UnitTestDRFA("testRC3"));
    return suite;
  }
}
/*L * Copyright ScenPro Inc, SAIC-F * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cadsr-freestyle-search/LICENSE.txt for details. */ // Copyright (c) 2006 ScenPro, Inc. // $Header: /share/content/gforge/freestylesearch/freestylesearch/src/gov/nih/nci/cadsr/freestylesearch/ui/FreestyleSearchForm.java,v 1.8 2009-04-09 13:59:23 hebell Exp $ // $Name: not supported by cvs2svn $ package gov.nih.nci.cadsr.freestylesearch.ui; import gov.nih.nci.cadsr.freestylesearch.util.Search; import gov.nih.nci.cadsr.freestylesearch.util.SearchAC; import gov.nih.nci.cadsr.freestylesearch.util.SearchException; import gov.nih.nci.cadsr.freestylesearch.util.SearchMatch; import javax.servlet.http.HttpServletRequest; import org.apache.struts.Globals; import org.apache.struts.action.ActionErrors; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.ActionMessage; import org.apache.struts.util.MessageResources; //import org.jboss.Version; /** * The ActionForm mapping the JSP used by this example user interface. * * @author lhebel Mar 7, 2006 */ public class FreestyleSearchForm extends ActionForm { /** * Constructor * */ public FreestyleSearchForm() { // Set defaults for all data. _excludeRetired = false; _firstTime = "Y"; _displayOptions = "N"; _phrase = ""; _limit = 100; _score = 3; _matching = SearchMatch.BEST; _types = new boolean[SearchAC.count()]; for (int i = 0; i < _types.length; ++i) _types[i] = true; } /** * Set the search phrase input field. * * @param val_ the search phrase, i.e. one or more terms */ public void setPhrase(String val_) { _phrase = val_; } /** * Get the input search phrase. * * @return return the user search phrase */ public String getPhrase() { return _phrase; } /** * Get the maximum results limit. * * @return the current maximum return limit. */ public String getLimit() { return String.valueOf(_limit); } /** * Get the maximum results limit. 
* * @return the current maximum return limit. */ public int getLimitInt() { return _limit; } /** * Set the maximum results limit. * * @param limit_ the limit for the result set. */ public void setLimit(String limit_) { _limit = Integer.parseInt(limit_); } /** * Get the term comparison mode. * * @return the term comparison mode, i.e. exact, partial or best. */ public String getMatching() { return String.valueOf(_matching.toInt()); } /** * Get the term comparison mode. * * @return the term comparison mode, i.e. exact, partial or best. */ public SearchMatch getMatchingEnum() { return _matching; } /** * Set the term comparison mode. * * @param matching_ the integer term comparison as defined in SearchMatch.toInt(). */ public void setMatching(String matching_) { _matching = SearchMatch.valueOf(Integer.parseInt(matching_)); } /** * Get the score group count limit. * * @return the score group count limit */ public String getScore() { return String.valueOf(_score); } /** * Get the score group count limit. * * @return the score group count limit. */ public int getScoreInt() { return _score; } /** * Set the score group count limit. * * @param score_ the score group count limit. */ public void setScore(String score_) { _score = Integer.parseInt(score_); } /** * Get the display options flag, used to control the "Options" on the user interface. * * @return the display options flag */ public String getDisplayOptions() { return _displayOptions; } /** * Set the display options flag, used to control the "Options" on the user interface. * * @param opt_ the display options flag */ public void setDisplayOptions(String opt_) { _displayOptions = opt_; } /** * Get the AC types selections. * * @return the restriction settings by type */ public boolean[] getTypes() { return _types; } /** * Get the first time flag, i.e. is this the first use of freestyle in the current browser session. 
* * @return return the first time flag */ public String getFirstTime() { return _firstTime; } /** * Set the browser session first time flag. * * @param flag_ 'Y' for the first time, otherwise 'N' */ public void setFirstTime(String flag_) { _firstTime = flag_; } /** * Set the exclude retired AC flag * * @param flag_ "Y" to exclude reitred AC's, otherwise don't exclude them. */ public void setExcludeRetired(String flag_) { if (flag_ != null && flag_.equals("Y")) _excludeRetired = true; else _excludeRetired = false; } /** * Get the exlude retired AC flag. * * @return "Y" to exclude retired AC's. */ public String getExcludeRetired() { return (_excludeRetired) ? "Y" : "N"; } /** * Get the exlude retired AC flag. * * @return "Y" to exclude retired AC's. */ public boolean getExcludeRetiredBool() { return _excludeRetired; } /** * Set the exclude Test Context flag * * @param flag_ "Y" to exclude "Test" */ public void setExcludeTest(String flag_) { if (flag_ != null && flag_.equals("Y")) _excludeTest = true; else _excludeTest = false; } /** * Get the exclude Test Context flag * * @return "Y" to exclude "Test" */ public String getExcludeTest() { return (_excludeTest) ? "Y" : "N"; } /** * Get the exclude Training Context flag * * @return true to exclude "Training" */ public boolean getExcludeTrainBool() { return _excludeTrain; } /** * Set the exclude Training Context flag * * @param flag_ "Y" to exclude "Training" */ public void setExcludeTrain(String flag_) { if (flag_ != null && flag_.equals("Y")) _excludeTrain = true; else _excludeTrain = false; } /** * Get the exclude Trianing Context flag * * @return "Y" to exclude "Training" */ public String getExcludeTrain() { return (_excludeTrain) ? 
"Y" : "N"; } /** * Get the exclude Test Context flag * * @return true to exclude "Test" */ public boolean getExcludeTestBool() { return _excludeTest; } /** * Set the footer banner * * @param val_ the footer */ public void setFooter(String val_) { _footer = val_; _footer = null; } /** * Get the footer banner * * @return the footer */ public String getFooter() { return _footer; } /** * Validate the content of the Edit Screen. * * @param mapping_ * The action map defined for Edit. * @param request_ * The servlet request object. * @return Any errors found. */ public ActionErrors validate(ActionMapping mapping_, HttpServletRequest request_) { ActionErrors errors = new ActionErrors(); FreestylePlugIn ds = (FreestylePlugIn) request_.getSession().getServletContext().getAttribute(FreestylePlugIn._DATASOURCE); if (_footer == null) { MessageResources msgs = (MessageResources) request_.getSession().getServletContext().getAttribute(Globals.MESSAGES_KEY); String temp = msgs.getMessage(Search._vers); //String jboss = Version.getInstance().getMajor() + "." + Version.getInstance().getMinor() + "." 
+ Version.getInstance().getRevision(); String jboss = "5.1.0"; _footer = "<table class=\"table3\"><colgroup></colgroup><tbody class=\"secttbody\" />\n" + "<tr><td class=\"ncifmenu\"><span style=\"color: #dddddd\">&nbsp;v&nbsp;" + temp + "&nbsp;(" + jboss + "/" + System.getProperty("java.version") + ")" + "</span></td></tr>\n" + "<tr>\n<td class=\"nciftrtable\">\n" + "<a href=\"mailto:ncicb@pop.nci.nih.gov?subject=caDSR%20Freestyle%20Search\"><img border=\"0\" src=\"/freestyle/images/email_icon.gif\" alt=\"Email NCI Help Desk\" title=\"Email NCI Help Desk\"></a>\n" + "<a target=\"_blank\" href=\"http://www.cancer.gov/\"><img border=\"0\" src=\"/freestyle/images/footer_nci.gif\" alt=\"National Cancer Institute Logo\" title=\"National Cancer Institute\"></a>\n" + "<a target=\"_blank\" href=\"http://www.dhhs.gov/\"><img border=\"0\" src=\"/freestyle/images/footer_hhs.gif\" alt=\"Department of Health and Human Services Logo\" title=\"Department of Health and Human Services\"></a>\n" + "<a target=\"_blank\" href=\"http://www.nih.gov/\"><img border=\"0\" src=\"/freestyle/images/footer_nih.gif\" alt=\"National Institutes of Health Logo\" title=\"National Institutes of Health\"></a>\n" + "<a target=\"_blank\" href=\"http://www.usa.gov/\"><img border=\"0\" src=\"/freestyle/images/footer_usagov.gif\" alt=\"USA.gov\" title=\"USA.gov\"></a>\n" + "</td>\n</tr>\n</table>\n"; } Search var = new Search(); var.setDataDescription(ds.getDataSource()); String seedTime = null; try { seedTime = var.getLastSeedTimestampString(); } catch (SearchException ex) { seedTime = ex.toString(); } request_.setAttribute("seedTime", seedTime); // The absence of a search phrase is not really an error but we don't want // to proceed to the Action Class if (_phrase == null || _phrase.length() == 0) { errors.add("error", new ActionMessage("error.nosearch")); } // If this is not the first time so update the AC type selections. 
if (_firstTime.charAt(0) == 'N') { for(int i = 0; i < _types.length; ++i) { _types[i] = (request_.getParameter("restrict" + i) != null); } } else { _firstTime = "N"; } // Set the attributes for proper display on the UI Options. for( int i = 0; i < _types.length; ++i) { if (_types[i]) request_.setAttribute("restrict" + i, "Y"); } // Return return errors; } private static final long serialVersionUID = 88840366374682878L; private boolean _excludeRetired; private boolean _excludeTest; private boolean _excludeTrain; private String _phrase; private int _limit; private SearchMatch _matching; private int _score; private String _displayOptions; private boolean[] _types; private String _firstTime; private String _footer; }
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.backup.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Contains detailed information about a copy job. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/backup-2018-11-15/CopyJob" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CopyJob implements Serializable, Cloneable, StructuredPojo { /** * <p> * The account ID that owns the copy job. * </p> */ private String accountId; /** * <p> * Uniquely identifies a copy job. * </p> */ private String copyJobId; /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * </p> */ private String sourceBackupVaultArn; /** * <p> * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> */ private String sourceRecoveryPointArn; /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. 
* </p> */ private String destinationBackupVaultArn; /** * <p> * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> */ private String destinationRecoveryPointArn; /** * <p> * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) volume or * an Amazon Relational Database Service (Amazon RDS) database. * </p> */ private String resourceArn; /** * <p> * The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of * <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, * January 26, 2018 12:11:30.087 AM. * </p> */ private java.util.Date creationDate; /** * <p> * The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value of * <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday, * January 26, 2018 12:11:30.087 AM. * </p> */ private java.util.Date completionDate; /** * <p> * The current state of a copy job. * </p> */ private String state; /** * <p> * A detailed message explaining the status of the job to copy a resource. * </p> */ private String statusMessage; /** * <p> * The size, in bytes, of a copy job. * </p> */ private Long backupSizeInBytes; /** * <p> * Specifies the IAM role ARN used to copy the target recovery point; for example, * <code>arn:aws:iam::123456789012:role/S3Access</code>. * </p> */ private String iamRoleArn; private RecoveryPointCreator createdBy; /** * <p> * The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) * volume or an Amazon Relational Database Service (Amazon RDS) database. * </p> */ private String resourceType; /** * <p> * The account ID that owns the copy job. 
* </p> * * @param accountId * The account ID that owns the copy job. */ public void setAccountId(String accountId) { this.accountId = accountId; } /** * <p> * The account ID that owns the copy job. * </p> * * @return The account ID that owns the copy job. */ public String getAccountId() { return this.accountId; } /** * <p> * The account ID that owns the copy job. * </p> * * @param accountId * The account ID that owns the copy job. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withAccountId(String accountId) { setAccountId(accountId); return this; } /** * <p> * Uniquely identifies a copy job. * </p> * * @param copyJobId * Uniquely identifies a copy job. */ public void setCopyJobId(String copyJobId) { this.copyJobId = copyJobId; } /** * <p> * Uniquely identifies a copy job. * </p> * * @return Uniquely identifies a copy job. */ public String getCopyJobId() { return this.copyJobId; } /** * <p> * Uniquely identifies a copy job. * </p> * * @param copyJobId * Uniquely identifies a copy job. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withCopyJobId(String copyJobId) { setCopyJobId(copyJobId); return this; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * </p> * * @param sourceBackupVaultArn * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. */ public void setSourceBackupVaultArn(String sourceBackupVaultArn) { this.sourceBackupVaultArn = sourceBackupVaultArn; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. 
* </p> * * @return An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. */ public String getSourceBackupVaultArn() { return this.sourceBackupVaultArn; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * </p> * * @param sourceBackupVaultArn * An Amazon Resource Name (ARN) that uniquely identifies a source copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withSourceBackupVaultArn(String sourceBackupVaultArn) { setSourceBackupVaultArn(sourceBackupVaultArn); return this; } /** * <p> * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @param sourceRecoveryPointArn * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. */ public void setSourceRecoveryPointArn(String sourceRecoveryPointArn) { this.sourceRecoveryPointArn = sourceRecoveryPointArn; } /** * <p> * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @return An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. 
*/ public String getSourceRecoveryPointArn() { return this.sourceRecoveryPointArn; } /** * <p> * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @param sourceRecoveryPointArn * An ARN that uniquely identifies a source recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withSourceRecoveryPointArn(String sourceRecoveryPointArn) { setSourceRecoveryPointArn(sourceRecoveryPointArn); return this; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * </p> * * @param destinationBackupVaultArn * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. */ public void setDestinationBackupVaultArn(String destinationBackupVaultArn) { this.destinationBackupVaultArn = destinationBackupVaultArn; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * </p> * * @return An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. */ public String getDestinationBackupVaultArn() { return this.destinationBackupVaultArn; } /** * <p> * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. 
* </p> * * @param destinationBackupVaultArn * An Amazon Resource Name (ARN) that uniquely identifies a destination copy vault; for example, * <code>arn:aws:backup:us-east-1:123456789012:vault:aBackupVault</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withDestinationBackupVaultArn(String destinationBackupVaultArn) { setDestinationBackupVaultArn(destinationBackupVaultArn); return this; } /** * <p> * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @param destinationRecoveryPointArn * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. */ public void setDestinationRecoveryPointArn(String destinationRecoveryPointArn) { this.destinationRecoveryPointArn = destinationRecoveryPointArn; } /** * <p> * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @return An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. */ public String getDestinationRecoveryPointArn() { return this.destinationRecoveryPointArn; } /** * <p> * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. * </p> * * @param destinationRecoveryPointArn * An ARN that uniquely identifies a destination recovery point; for example, * <code>arn:aws:backup:us-east-1:123456789012:recovery-point:1EB3B5E7-9EB0-435A-A80B-108B488B0D45</code>. 
* @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withDestinationRecoveryPointArn(String destinationRecoveryPointArn) { setDestinationRecoveryPointArn(destinationRecoveryPointArn); return this; } /** * <p> * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) volume or * an Amazon Relational Database Service (Amazon RDS) database. * </p> * * @param resourceArn * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) * volume or an Amazon Relational Database Service (Amazon RDS) database. */ public void setResourceArn(String resourceArn) { this.resourceArn = resourceArn; } /** * <p> * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) volume or * an Amazon Relational Database Service (Amazon RDS) database. * </p> * * @return The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) * volume or an Amazon Relational Database Service (Amazon RDS) database. */ public String getResourceArn() { return this.resourceArn; } /** * <p> * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) volume or * an Amazon Relational Database Service (Amazon RDS) database. * </p> * * @param resourceArn * The Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS) * volume or an Amazon Relational Database Service (Amazon RDS) database. * @return Returns a reference to this object so that method calls can be chained together. */ public CopyJob withResourceArn(String resourceArn) { setResourceArn(resourceArn); return this; } /** * <p> * The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of * <code>CreationDate</code> is accurate to milliseconds. 
For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @param creationDate
     *        The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of
     *        <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents
     *        Friday, January 26, 2018 12:11:30.087 AM.
     */
    public void setCreationDate(java.util.Date creationDate) {
        this.creationDate = creationDate;
    }

    /**
     * <p>
     * The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of
     * <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @return The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value
     *         of <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087
     *         represents Friday, January 26, 2018 12:11:30.087 AM.
     */
    public java.util.Date getCreationDate() {
        return this.creationDate;
    }

    /**
     * <p>
     * The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of
     * <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @param creationDate
     *        The date and time a copy job is created, in Unix format and Coordinated Universal Time (UTC). The value of
     *        <code>CreationDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents
     *        Friday, January 26, 2018 12:11:30.087 AM.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withCreationDate(java.util.Date creationDate) {
        setCreationDate(creationDate);
        return this;
    }

    /**
     * <p>
     * The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value of
     * <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @param completionDate
     *        The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value
     *        of <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087
     *        represents Friday, January 26, 2018 12:11:30.087 AM.
     */
    public void setCompletionDate(java.util.Date completionDate) {
        this.completionDate = completionDate;
    }

    /**
     * <p>
     * The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value of
     * <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @return The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value
     *         of <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087
     *         represents Friday, January 26, 2018 12:11:30.087 AM.
     */
    public java.util.Date getCompletionDate() {
        return this.completionDate;
    }

    /**
     * <p>
     * The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value of
     * <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087 represents Friday,
     * January 26, 2018 12:11:30.087 AM.
     * </p>
     *
     * @param completionDate
     *        The date and time a copy job is completed, in Unix format and Coordinated Universal Time (UTC). The value
     *        of <code>CompletionDate</code> is accurate to milliseconds. For example, the value 1516925490.087
     *        represents Friday, January 26, 2018 12:11:30.087 AM.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withCompletionDate(java.util.Date completionDate) {
        setCompletionDate(completionDate);
        return this;
    }

    /**
     * <p>
     * The current state of a copy job.
     * </p>
     *
     * @param state
     *        The current state of a copy job.
     * @see CopyJobState
     */
    public void setState(String state) {
        this.state = state;
    }

    /**
     * <p>
     * The current state of a copy job.
     * </p>
     *
     * @return The current state of a copy job.
     * @see CopyJobState
     */
    public String getState() {
        return this.state;
    }

    /**
     * <p>
     * The current state of a copy job.
     * </p>
     *
     * @param state
     *        The current state of a copy job.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see CopyJobState
     */
    public CopyJob withState(String state) {
        setState(state);
        return this;
    }

    /**
     * <p>
     * The current state of a copy job.
     * </p>
     * <p>
     * Note: unlike the <code>String</code> overload, this variant dereferences its argument
     * (<code>state.toString()</code>) and therefore throws <code>NullPointerException</code> for <code>null</code>.
     * </p>
     *
     * @param state
     *        The current state of a copy job.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see CopyJobState
     */
    public CopyJob withState(CopyJobState state) {
        this.state = state.toString();
        return this;
    }

    /**
     * <p>
     * A detailed message explaining the status of the job to copy a resource.
     * </p>
     *
     * @param statusMessage
     *        A detailed message explaining the status of the job to copy a resource.
     */
    public void setStatusMessage(String statusMessage) {
        this.statusMessage = statusMessage;
    }

    /**
     * <p>
     * A detailed message explaining the status of the job to copy a resource.
     * </p>
     *
     * @return A detailed message explaining the status of the job to copy a resource.
     */
    public String getStatusMessage() {
        return this.statusMessage;
    }

    /**
     * <p>
     * A detailed message explaining the status of the job to copy a resource.
     * </p>
     *
     * @param statusMessage
     *        A detailed message explaining the status of the job to copy a resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withStatusMessage(String statusMessage) {
        setStatusMessage(statusMessage);
        return this;
    }

    /**
     * <p>
     * The size, in bytes, of a copy job.
     * </p>
     *
     * @param backupSizeInBytes
     *        The size, in bytes, of a copy job.
     */
    public void setBackupSizeInBytes(Long backupSizeInBytes) {
        this.backupSizeInBytes = backupSizeInBytes;
    }

    /**
     * <p>
     * The size, in bytes, of a copy job.
     * </p>
     *
     * @return The size, in bytes, of a copy job.
     */
    public Long getBackupSizeInBytes() {
        return this.backupSizeInBytes;
    }

    /**
     * <p>
     * The size, in bytes, of a copy job.
     * </p>
     *
     * @param backupSizeInBytes
     *        The size, in bytes, of a copy job.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withBackupSizeInBytes(Long backupSizeInBytes) {
        setBackupSizeInBytes(backupSizeInBytes);
        return this;
    }

    /**
     * <p>
     * Specifies the IAM role ARN used to copy the target recovery point; for example,
     * <code>arn:aws:iam::123456789012:role/S3Access</code>.
     * </p>
     *
     * @param iamRoleArn
     *        Specifies the IAM role ARN used to copy the target recovery point; for example,
     *        <code>arn:aws:iam::123456789012:role/S3Access</code>.
     */
    public void setIamRoleArn(String iamRoleArn) {
        this.iamRoleArn = iamRoleArn;
    }

    /**
     * <p>
     * Specifies the IAM role ARN used to copy the target recovery point; for example,
     * <code>arn:aws:iam::123456789012:role/S3Access</code>.
     * </p>
     *
     * @return Specifies the IAM role ARN used to copy the target recovery point; for example,
     *         <code>arn:aws:iam::123456789012:role/S3Access</code>.
     */
    public String getIamRoleArn() {
        return this.iamRoleArn;
    }

    /**
     * <p>
     * Specifies the IAM role ARN used to copy the target recovery point; for example,
     * <code>arn:aws:iam::123456789012:role/S3Access</code>.
     * </p>
     *
     * @param iamRoleArn
     *        Specifies the IAM role ARN used to copy the target recovery point; for example,
     *        <code>arn:aws:iam::123456789012:role/S3Access</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withIamRoleArn(String iamRoleArn) {
        setIamRoleArn(iamRoleArn);
        return this;
    }

    /**
     * @param createdBy
     *        Information describing what created this copy job. NOTE(review): the service model ships no description
     *        for this member — presumably the backup plan/rule that spawned the source recovery point; confirm against
     *        the AWS Backup <code>RecoveryPointCreator</code> API reference.
     */
    public void setCreatedBy(RecoveryPointCreator createdBy) {
        this.createdBy = createdBy;
    }

    /**
     * @return Information describing what created this copy job. NOTE(review): see {@link #setCreatedBy} — the service
     *         model omits this description; verify semantics against the AWS Backup API reference.
     */
    public RecoveryPointCreator getCreatedBy() {
        return this.createdBy;
    }

    /**
     * @param createdBy
     *        Information describing what created this copy job (see {@link #setCreatedBy} for the caveat on the missing
     *        service-model description).
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withCreatedBy(RecoveryPointCreator createdBy) {
        setCreatedBy(createdBy);
        return this;
    }

    /**
     * <p>
     * The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS)
     * volume or an Amazon Relational Database Service (Amazon RDS) database.
     * </p>
     *
     * @param resourceType
     *        The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon
     *        EBS) volume or an Amazon Relational Database Service (Amazon RDS) database.
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }

    /**
     * <p>
     * The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS)
     * volume or an Amazon Relational Database Service (Amazon RDS) database.
     * </p>
     *
     * @return The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon
     *         EBS) volume or an Amazon Relational Database Service (Amazon RDS) database.
     */
    public String getResourceType() {
        return this.resourceType;
    }

    /**
     * <p>
     * The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon EBS)
     * volume or an Amazon Relational Database Service (Amazon RDS) database.
     * </p>
     *
     * @param resourceType
     *        The type of Amazon Web Services resource to be copied; for example, an Amazon Elastic Block Store (Amazon
     *        EBS) volume or an Amazon Relational Database Service (Amazon RDS) database.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CopyJob withResourceType(String resourceType) {
        setResourceType(resourceType);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAccountId() != null)
            sb.append("AccountId: ").append(getAccountId()).append(",");
        if (getCopyJobId() != null)
            sb.append("CopyJobId: ").append(getCopyJobId()).append(",");
        if (getSourceBackupVaultArn() != null)
            sb.append("SourceBackupVaultArn: ").append(getSourceBackupVaultArn()).append(",");
        if (getSourceRecoveryPointArn() != null)
            sb.append("SourceRecoveryPointArn: ").append(getSourceRecoveryPointArn()).append(",");
        if (getDestinationBackupVaultArn() != null)
            sb.append("DestinationBackupVaultArn: ").append(getDestinationBackupVaultArn()).append(",");
        if (getDestinationRecoveryPointArn() != null)
            sb.append("DestinationRecoveryPointArn: ").append(getDestinationRecoveryPointArn()).append(",");
        if (getResourceArn() != null)
            sb.append("ResourceArn: ").append(getResourceArn()).append(",");
        if (getCreationDate() != null)
            sb.append("CreationDate: ").append(getCreationDate()).append(",");
        if (getCompletionDate() != null)
            sb.append("CompletionDate: ").append(getCompletionDate()).append(",");
        if (getState() != null)
            sb.append("State: ").append(getState()).append(",");
        if (getStatusMessage() != null)
            sb.append("StatusMessage: ").append(getStatusMessage()).append(",");
        if (getBackupSizeInBytes() != null)
            sb.append("BackupSizeInBytes: ").append(getBackupSizeInBytes()).append(",");
        if (getIamRoleArn() != null)
            sb.append("IamRoleArn: ").append(getIamRoleArn()).append(",");
        if (getCreatedBy() != null)
            sb.append("CreatedBy: ").append(getCreatedBy()).append(",");
        if (getResourceType() != null)
            sb.append("ResourceType: ").append(getResourceType());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof CopyJob == false)
            return false;
        CopyJob other = (CopyJob) obj;
        // Generated pattern: XOR detects "exactly one side null"; the second check compares non-null values.
        if (other.getAccountId() == null ^ this.getAccountId() == null)
            return false;
        if (other.getAccountId() != null && other.getAccountId().equals(this.getAccountId()) == false)
            return false;
        if (other.getCopyJobId() == null ^ this.getCopyJobId() == null)
            return false;
        if (other.getCopyJobId() != null && other.getCopyJobId().equals(this.getCopyJobId()) == false)
            return false;
        if (other.getSourceBackupVaultArn() == null ^ this.getSourceBackupVaultArn() == null)
            return false;
        if (other.getSourceBackupVaultArn() != null && other.getSourceBackupVaultArn().equals(this.getSourceBackupVaultArn()) == false)
            return false;
        if (other.getSourceRecoveryPointArn() == null ^ this.getSourceRecoveryPointArn() == null)
            return false;
        if (other.getSourceRecoveryPointArn() != null && other.getSourceRecoveryPointArn().equals(this.getSourceRecoveryPointArn()) == false)
            return false;
        if (other.getDestinationBackupVaultArn() == null ^ this.getDestinationBackupVaultArn() == null)
            return false;
        if (other.getDestinationBackupVaultArn() != null && other.getDestinationBackupVaultArn().equals(this.getDestinationBackupVaultArn()) == false)
            return false;
        if (other.getDestinationRecoveryPointArn() == null ^ this.getDestinationRecoveryPointArn() == null)
            return false;
        if (other.getDestinationRecoveryPointArn() != null && other.getDestinationRecoveryPointArn().equals(this.getDestinationRecoveryPointArn()) == false)
            return false;
        if (other.getResourceArn() == null ^ this.getResourceArn() == null)
            return false;
        if (other.getResourceArn() != null && other.getResourceArn().equals(this.getResourceArn()) == false)
            return false;
        if (other.getCreationDate() == null ^ this.getCreationDate() == null)
            return false;
        if (other.getCreationDate() != null && other.getCreationDate().equals(this.getCreationDate()) == false)
            return false;
        if (other.getCompletionDate() == null ^ this.getCompletionDate() == null)
            return false;
        if (other.getCompletionDate() != null && other.getCompletionDate().equals(this.getCompletionDate()) == false)
            return false;
        if (other.getState() == null ^ this.getState() == null)
            return false;
        if (other.getState() != null && other.getState().equals(this.getState()) == false)
            return false;
        if (other.getStatusMessage() == null ^ this.getStatusMessage() == null)
            return false;
        if (other.getStatusMessage() != null && other.getStatusMessage().equals(this.getStatusMessage()) == false)
            return false;
        if (other.getBackupSizeInBytes() == null ^ this.getBackupSizeInBytes() == null)
            return false;
        if (other.getBackupSizeInBytes() != null && other.getBackupSizeInBytes().equals(this.getBackupSizeInBytes()) == false)
            return false;
        if (other.getIamRoleArn() == null ^ this.getIamRoleArn() == null)
            return false;
        if (other.getIamRoleArn() != null && other.getIamRoleArn().equals(this.getIamRoleArn()) == false)
            return false;
        if (other.getCreatedBy() == null ^ this.getCreatedBy() == null)
            return false;
        if (other.getCreatedBy() != null && other.getCreatedBy().equals(this.getCreatedBy()) == false)
            return false;
        if (other.getResourceType() == null ^ this.getResourceType() == null)
            return false;
        if (other.getResourceType() != null && other.getResourceType().equals(this.getResourceType()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getAccountId() == null) ? 0 : getAccountId().hashCode());
        hashCode = prime * hashCode + ((getCopyJobId() == null) ? 0 : getCopyJobId().hashCode());
        hashCode = prime * hashCode + ((getSourceBackupVaultArn() == null) ? 0 : getSourceBackupVaultArn().hashCode());
        hashCode = prime * hashCode + ((getSourceRecoveryPointArn() == null) ? 0 : getSourceRecoveryPointArn().hashCode());
        hashCode = prime * hashCode + ((getDestinationBackupVaultArn() == null) ? 0 : getDestinationBackupVaultArn().hashCode());
        hashCode = prime * hashCode + ((getDestinationRecoveryPointArn() == null) ? 0 : getDestinationRecoveryPointArn().hashCode());
        hashCode = prime * hashCode + ((getResourceArn() == null) ? 0 : getResourceArn().hashCode());
        hashCode = prime * hashCode + ((getCreationDate() == null) ? 0 : getCreationDate().hashCode());
        hashCode = prime * hashCode + ((getCompletionDate() == null) ? 0 : getCompletionDate().hashCode());
        hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
        hashCode = prime * hashCode + ((getStatusMessage() == null) ? 0 : getStatusMessage().hashCode());
        hashCode = prime * hashCode + ((getBackupSizeInBytes() == null) ? 0 : getBackupSizeInBytes().hashCode());
        hashCode = prime * hashCode + ((getIamRoleArn() == null) ? 0 : getIamRoleArn().hashCode());
        hashCode = prime * hashCode + ((getCreatedBy() == null) ? 0 : getCreatedBy().hashCode());
        hashCode = prime * hashCode + ((getResourceType() == null) ? 0 : getResourceType().hashCode());
        return hashCode;
    }

    @Override
    public CopyJob clone() {
        try {
            return (CopyJob) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.backup.model.transform.CopyJobMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.nio; import com.google.j2objc.annotations.Weak; import java.io.FileDescriptor; import java.io.IOException; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; import java.net.SocketAddress; import java.net.SocketOption; import java.net.SocketTimeoutException; import java.net.StandardSocketOptions; import java.nio.channels.AlreadyBoundException; import java.nio.channels.ClosedChannelException; import java.nio.channels.IllegalBlockingModeException; import java.nio.channels.NotYetBoundException; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.UnsupportedAddressTypeException; import java.nio.channels.spi.SelectorProvider; import java.util.Set; import libcore.io.ErrnoException; import libcore.io.IoUtils; import static libcore.io.OsConstants.*; /** * The default ServerSocketChannel. 
 */
final class ServerSocketChannelImpl extends ServerSocketChannel implements FileDescriptorChannel {

    // Adapter that presents this channel as a java.net.ServerSocket; it also owns the
    // underlying file descriptor (exposed via getFD$()).
    private final ServerSocketAdapter socket;

    // Serializes concurrent accept() calls on this channel.
    private final Object acceptLock = new Object();

    public ServerSocketChannelImpl(SelectorProvider sp) throws IOException {
        super(sp);
        this.socket = new ServerSocketAdapter(this);
    }

    @Override
    public ServerSocket socket() {
        return socket;
    }

    /** @hide Until ready for a public API change */
    @Override
    public final ServerSocketChannel bind(SocketAddress localAddr, int backlog) throws IOException {
        if (!isOpen()) {
            throw new ClosedChannelException();
        }
        if (socket.isBound()) {
            throw new AlreadyBoundException();
        }
        if (localAddr != null && !(localAddr instanceof InetSocketAddress)) {
            throw new UnsupportedAddressTypeException();
        }
        socket.bind(localAddr, backlog);
        return this;
    }

    /** @hide Until ready for a public API change */
    @Override
    public SocketAddress getLocalAddress() throws IOException {
        if (!isOpen()) {
            throw new ClosedChannelException();
        }
        return socket.getLocalSocketAddress();
    }

    /** @hide Until ready for a public API change */
    @Override
    public <T> T getOption(SocketOption<T> option) throws IOException {
        return ChannelUtils.getSocketOption(this, StandardSocketOptions.SERVER_SOCKET_OPTIONS, option);
    }

    /** @hide Until ready for a public API change */
    @Override
    public <T> ServerSocketChannel setOption(SocketOption<T> option, T value) throws IOException {
        ChannelUtils.setSocketOption(this, StandardSocketOptions.SERVER_SOCKET_OPTIONS, option, value);
        return this;
    }

    /** @hide Until ready for a public API change */
    @Override
    public Set<SocketOption<?>> supportedOptions() {
        return StandardSocketOptions.SERVER_SOCKET_OPTIONS;
    }

    /**
     * Accepts a pending connection. Returns null (instead of blocking) when the channel is
     * non-blocking and no connection is ready — detected via a SocketTimeoutException whose
     * cause is an ErrnoException with errno == EAGAIN.
     */
    @Override
    public SocketChannel accept() throws IOException {
        if (!isOpen()) {
            throw new ClosedChannelException();
        }
        if (!socket.isBound()) {
            throw new NotYetBoundException();
        }

        // Create an empty socket channel. This will be populated by ServerSocketAdapter.implAccept.
        SocketChannelImpl result = new SocketChannelImpl(provider(), false);
        try {
            // begin()/end() bracket the blocking region for async close/interrupt handling.
            begin();
            synchronized (acceptLock) {
                try {
                    socket.implAccept(result);
                } catch (SocketTimeoutException e) {
                    if (shouldThrowSocketTimeoutExceptionFromAccept(e)) {
                        throw e;
                    }
                    // Otherwise, this is a non-blocking socket and there's nothing ready, so we'll
                    // fall through and return null.
                }
            }
        } finally {
            end(result.isConnected());
        }
        return result.isConnected() ? result : null;
    }

    // True unless this channel is non-blocking and the timeout was caused by EAGAIN
    // (i.e. "no connection ready yet", which accept() maps to a null return).
    private boolean shouldThrowSocketTimeoutExceptionFromAccept(SocketTimeoutException e) {
        if (isBlocking()) {
            return true;
        }
        Throwable cause = e.getCause();
        if (cause instanceof ErrnoException) {
            if (((ErrnoException) cause).errno == EAGAIN) {
                return false;
            }
        }
        return true;
    }

    @Override
    protected void implConfigureBlocking(boolean blocking) throws IOException {
        IoUtils.setBlocking(socket.getFD$(), blocking);
    }

    @Override
    synchronized protected void implCloseSelectableChannel() throws IOException {
        if (!socket.isClosed()) {
            socket.close();
        }
    }

    @Override
    public FileDescriptor getFD() {
        return socket.getFD$();
    }

    /**
     * ServerSocket view over the channel. @Weak breaks the reference cycle between the
     * adapter and the channel (j2objc reference counting).
     */
    private static class ServerSocketAdapter extends ServerSocket {
        @Weak
        private final ServerSocketChannelImpl channelImpl;

        ServerSocketAdapter(ServerSocketChannelImpl aChannelImpl) throws IOException {
            this.channelImpl = aChannelImpl;
        }

        @Override
        public Socket accept() throws IOException {
            if (!isBound()) {
                // NOTE(review): unbound socket reported as IllegalBlockingModeException here,
                // mirroring the channel-backed ServerSocket contract.
                throw new IllegalBlockingModeException();
            }
            SocketChannel sc = channelImpl.accept();
            if (sc == null) {
                // Non-blocking channel with nothing ready: a channel-backed ServerSocket
                // must not return null, so signal the blocking-mode misuse instead.
                throw new IllegalBlockingModeException();
            }
            return sc.socket();
        }

        /**
         * Accepts into the given (empty) channel's socket and syncs the channel state.
         * The client socket is closed if the accept does not complete normally.
         */
        public Socket implAccept(SocketChannelImpl clientSocketChannel) throws IOException {
            Socket clientSocket = clientSocketChannel.socket();
            boolean connectOK = false;
            try {
                synchronized (this) {
                    super.implAccept(clientSocket);

                    // Sync the client socket's associated channel state with the Socket and OS.
                    InetSocketAddress remoteAddress = new InetSocketAddress(
                            clientSocket.getInetAddress(), clientSocket.getPort());
                    clientSocketChannel.onAccept(remoteAddress, false /* updateSocketState */);
                }
                connectOK = true;
            } finally {
                if (!connectOK) {
                    clientSocket.close();
                }
            }
            return clientSocket;
        }

        @Override
        public ServerSocketChannel getChannel() {
            return channelImpl;
        }

        @Override
        public void close() throws IOException {
            // Closing the socket also closes the owning channel, if still open.
            synchronized (channelImpl) {
                super.close();
                if (channelImpl.isOpen()) {
                    channelImpl.close();
                }
            }
        }

        private FileDescriptor getFD$() {
            return super.getImpl$().getFD$();
        }
    }
}
package fr.inria.verveine.extractor.fortran;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.StringTokenizer;

import fortran.ofp.parser.java.FortranStream;

/**
 * Heavily inspired (i.e. copied) from OpenFortranProject {@link FortranStream}
 * Had to redefine it because (1) wanted to allow creating a stream from a String;
 * (2) too many private methods in the superclass that could not be easily used in the constructor redefinition.
 * Unfortunately, this implies duplicating here a lot of the code (attributes, methods) of the super class :-(
 */
public class VerveineFortranStream extends FortranStream {

    // FortranLanguage.FIXED_FORM or free form; selects which buffer-conversion pass runs.
    private int sourceForm;

    // Preprocessor macro name -> value, used to evaluate #if conditions.
    protected Map<String,String> macros;

    /**
     * A simple tree structure used to analyse the boolean expression in #if conditions.<br>
     * The constructor is responsible for doing a (simplified) parsing of the condition.
     * <ul>
     * <li>Only two levels of expressions: "&lt;comparison&gt;" (of a &lt;macro&gt; and a &lt;value&gt;), or
     * boolean expression: "&lt;comparison&gt; && &lt;comparison&gt;"
     * <li>Only "||" and "&&" boolean operators allowed
     * <li>There is no precedence between boolean operators
     * <li>Only "!=" and "==" comparisons allowed
     * <li>Comparisons are always &lt;macro&gt; &lt;comparison&gt; &lt;value&gt;
     * </ul>
     */
    public static class StringTree {
        // Parsing "levels": a node is a boolean expression, a comparison, or a bare term.
        final static int BOOLEAN = 1;
        final static int COMPARISON = 2;
        final static int TERM = 3;

        // Operator ("&&", "||", "==", "!=") for inner nodes, or the literal text for leaves.
        private String node;
        private StringTree left;
        private StringTree right;

        StringTree(String expr) {
            this(expr, BOOLEAN);
        }

        StringTree(String expr, int level) {
            if (level == TERM) {
                node = expr;
            }
            else if (level == COMPARISON) {
                initializeAsComparison(expr);
            }
            else { // level == BOOLEAN
                initializeAsBooleanCondition(expr);
            }
        }

        // NOTE(review): the while-loop re-tests hasMoreTokens after the recursive setRight()
        // already consumed the rest of the expression; with more than one boolean operator the
        // left child is overwritten on each iteration — confirm against multi-operator inputs.
        protected void initializeAsBooleanCondition(String expr) {
            StringTokenizer tokenizer = new StringTokenizer(expr, "|&", /*returnDelims*/true);
            String term = tokenizer.nextToken().trim();

            if (tokenizer.hasMoreTokens()) {
                // there may be many more ...
                while (tokenizer.hasMoreTokens()) {
                    // node is actually a comparison, so decompose it and put it as left child
                    setLeft( new StringTree(term, COMPARISON));
                    // get the operator (in two separate tokens :-( )
                    node = tokenizer.nextToken().trim()+tokenizer.nextToken().trim();
                    // and parse the rest of the boolean expression
                    setRight( new StringTree(tokenizer.nextToken().trim(), BOOLEAN));
                }
            }
            else {
                initializeAsComparison(term);
            }
        }

        protected void initializeAsComparison(String expr) {
            StringTokenizer tokenizer = new StringTokenizer(expr, "!=", /*returnDelims*/true);
            String macro = tokenizer.nextToken().trim();
            if (tokenizer.hasMoreTokens()) {
                // there is other tokens, so it is a comparison: <macro> <op> <value>
                // tokenizer separates the two characters of operator in two tokens :-(
                node = tokenizer.nextToken().trim()+tokenizer.nextToken().trim();
                setLeft( new StringTree(macro, TERM));
                setRight( new StringTree(tokenizer.nextToken().trim(), TERM));
            }
            else {
                // no other tokens, it was just the macro alone
                node = macro;
            }
        }

        public String getValue() {
            return node;
        }

        public StringTree getLeft() {
            return left;
        }

        public void setLeft(StringTree left) {
            this.left = left;
        }

        public StringTree getRight() {
            return right;
        }

        public void setRight(StringTree right) {
            this.right = right;
        }
    }

    /**
     * Builds a stream from a file on disk (superclass behaviour) then blanks out
     * preprocessor-conditional code according to {@code macros}.
     */
    public VerveineFortranStream(Map<String,String> macros, String filename) throws IOException {
        super(filename);
        this.macros = macros;
        blankPreprocessedCode(0);
    }

    /**
     * Builds a stream directly from a String (the feature the superclass lacks).
     */
    public VerveineFortranStream(Map<String,String> macros, String content, int sourceForm) throws IOException {
        super("verveineStubFile.f90");  // calls FortranStream constructor and initializes it correctly (but with an empty file)
        this.macros = macros;
        // now sets the right content from the input string to the stream
        // Note: FortranStream defines filename as private
        // but inherits (from antlr.FileStream) fileName (capital N) !!
        // can't modify filename, but can at least modify fileName
        this.fileName = "-no-input-file-";
        this.sourceForm = sourceForm;

        this.data = content.toCharArray();
        this.n = content.length();
        convertInputBuffer();
        blankPreprocessedCode(0);
    }

    /**
     * Normalizes line endings (CR and CRLF -> LF), appends two trailing LFs, then runs the
     * fixed- or free-form conversion pass depending on {@code sourceForm}.
     */
    private void convertInputBuffer() {
        char[] newData = new char[n+2];
        int from = 0, to = 0;
        while( from < n ) {
            if (super.data[from] != '\r') {
                newData[to++] = super.data[from];
            }
            else {
                newData[to++] = '\n'; // replace '\r'
                if (from+1 < super.n && super.data[from+1] == '\n') {
                    from += 1; // skip the '\n' following the '\r' (CRLF collapses to one LF)
                }
            }
            from += 1;
        }
        // append two extra LFs
        newData[to++] = '\n';
        newData[to++] = '\n';
        // fill any extra slots with blanks
        while( to < n+2 ) {
            newData[to++] = ' ';
        }
        data = newData;

        if (this.sourceForm == FortranLanguage.FIXED_FORM) {
            convertFixedFormInputBuffer();
        }
        else {
            convertFreeFormInputBuffer();
        }
    }

    /**
     * Scans the whole buffer from {@code i}, blanking preprocessor-conditional regions.
     * {@code col == 0} means "at start of line", which is where preprocessor lines can match.
     */
    private int blankPreprocessedCode(int i) {
        int col = 0;
        for (; i < super.n; i++) {
            if ( (col == 0) && matchPreprocessLine(i, data) ) {
                String line = lineAsString(data,i);
                i = blankThisIfMacro(i, line);
            }
            else if (data[i] == '\n') {
                col=0;
            }
            else {
                col++;
            }
        }
        return i;
    }

    /**
     * checks that current line is a '#if' preprocessor statement.
     * If so, checks whether the macro has been defined with the correct value.
     * If not so blanks everything up to a corresponding '#endif' line
     */
    protected int blankThisIfMacro(int i, String line) {
        if (line.startsWith("#if")) {
            StringTree expr = new StringTree(line.substring(3)); // line after the #if
            if (! evaluateCondition(expr)) {
                i = blankLinesUpToEndif(i+line.length()+1);
            }
        }
        else if (line.startsWith("#else")) {
            // we were not "blanking" the #if part, so we should blank the #else part
            i = blankLinesUpToEndif(i+line.length()+1);
        }
        return i;
    }

    /**
     * Recursively evaluates a parsed #if condition against {@code macros}.
     * NOTE(review): for a bare-macro tree (node = macro name, no children) the else branch
     * dereferences getLeft()/getRight() — looks like it would NPE; confirm which expression
     * shapes actually reach here.
     */
    protected boolean evaluateCondition(StringTree expr) {
        if (expr.getValue().equals("&&")) {
            return evaluateCondition(expr.getLeft()) && evaluateCondition(expr.getRight());
        }
        else if (expr.getValue().equals("||")) {
            return evaluateCondition(expr.getLeft()) || evaluateCondition(expr.getRight());
        }
        else {
            String macro = expr.getLeft().getValue();
            String expected = expr.getRight().getValue();
            String comparator = expr.getValue();
            String actual = macros.get(macro);

            if (actual == null) {
                // macro is undefined, only true if comparator is "!="
                return (comparator.equals("!="));
            }
            else if (comparator.equals("==")) {
                return actual.equals(expected);
            }
            else { // (comparator.equals("!="))
                return ! actual.equals(expected);
            }
        }
    }

    // Extracts the text of the line starting at {@code start} (up to, excluding, the next '\n').
    private String lineAsString(char[] buf, int start) {
        int eol=start;
        while ( (eol < n) && (buf[eol] != '\n') ) {
            eol++;
        }
        return new String( Arrays.copyOfRange(data, start, eol) );
    }

    /*
     * up to "#endif" or "#else"
     */
    private int blankLinesUpToEndif(int i) {
        int col = 0;
        while (i < n) {
            if (col == 0) {
                String line = lineAsString(data, i);
                if (line.startsWith("#endif") || line.startsWith("#else")) {
                    return i+line.length();
                }
                else if ( line.startsWith("#if")) {
                    // recursive call to process inner #if instruction
                    // but before make sure no other macro will match in this process
                    Map<String, String> backup = macros;
                    macros = new HashMap<>();
                    i = blankThisIfMacro(i, line);
                    macros = backup;
                    // skip end of the loop to restart a new line
                    continue;
                }
            }
            if (data[i] == '\n') {
                col=0;
                i++;
            }
            else {
                // overwrite the character with a blank so token positions stay aligned
                data[i] = ' ';
                col++;
                i++;
            }
        }
        return i;
    }

    /**
     * Free-form conversion pass (copied from FortranStream): strips continuations, relocates
     * comments that sit inside continued lines, and handles strings/Holleriths specially.
     * Writes into a fresh buffer and swaps it into the inherited data/n fields.
     */
    private void convertFreeFormInputBuffer() {
        // an integer "tuple" to hold i, count return values
        int [] index_count;

        // buffer for line comments and preprocessor lines
        StringBuffer comments = new StringBuffer();

        char[] newData = new char[super.n];

        boolean continuation = false;
        int count = 0;
        int col = 1; // 1 based

        for (int i = 0; i < super.n; i++) {
            int ii;

            // process column 1 special characters
            if (col == 1) {
                ii = consumePreprocessLine(i, data, comments);
                while (ii != i) {
                    // preprocess line can't be added immediately because
                    // could be in the middle of a continued line
                    line += 1;
                    i = ii;
                    ii = consumePreprocessLine(i, data, comments);
                }

                ii = consumeFreeFormCommentLine(i, data, comments);
                while (ii != i) {
                    // comment line can't be added immediately because
                    // could be in the middle of a continued line
                    line += 1;
                    i = ii;
                    ii = consumeFreeFormCommentLine(i, data, comments);
                }

                if (continuation) {
                    // '&' may be first nonblank character in a line,
                    // if so, skip over the continuation character
                    if ((ii = skipFreeFormContinuationAtBegin(i, data)) != i) {
                        col += ii - i;
                        i = ii;
                    }
                    // process a string if it exists
                    else if (matchFreeFormString(i, data)) {
                        char quoteChar = data[i];
                        index_count = consumeFreeFormString(i, data, count, newData);
                        ii = index_count[0];
                        count = index_count[1];
                        while (data[ii] == '&') {
                            // string is continued across multiple lines
                            line += 1;
                            col += ii - i;
                            i = ii;
                            index_count = completeContinuedString(quoteChar, i, data, count, newData);
                            ii = index_count[0];
                            count = index_count[1];
                        }
                        col += ii - i;
                        i = ii;
                    }
                    continuation = false;
                }
                else {
                    // add any comments and preprocess lines since not in
                    // the middle of a continued line
                    if (comments.length() > 0) {
                        count = consumeCommentLines(count, newData, comments);
                        if (i >= super.n) {
                            // this can occur if last line is a comment line
                            continue;
                        }
                    }
                }
            }
            // process all columns > 1
            else {
                // consume comment if it exists but retain '\n'
                if ((ii = consumeComment(i, data, comments)) != i) {
                    count = consumeCommentLines(count, newData, comments);
                    i = ii;
                }
                // remove continuation if it exists but retain '\n'
                else if (matchFreeFormContinuationAtEnd(i, data)) {
                    ii = consumeFreeFormContinuationAtEnd(i, data, comments);
                    continuation = true;
                    i = ii;
                }
                // process a string if it exists but retain trailing quote char
                else if (matchFreeFormString(i, data)) {
                    char quoteChar = data[i];
                    index_count = consumeFreeFormString(i, data, count, newData);
                    ii = index_count[0];
                    count = index_count[1];
                    while (data[ii] == '&') {
                        // string is continued across multiple lines
                        line += 1;
                        col += ii - i;
                        i = ii;
                        index_count = completeContinuedString(quoteChar, i, data, count, newData);
                        ii = index_count[0];
                        count = index_count[1];
                    }
                    col += ii - i;
                    i = ii;
                }
                // Holleriths are matched after strings so Hollerith matching doesn't have
                // to worry about string, i.e, the string, "4HThis is a string".
                else {
                    index_count = consumeHollerith(i, data, count, newData);
                    ii = index_count[0];
                    count = index_count[1];
                    if (ii != i) {
                        col += ii - i;
                        i = ii;
                    }
                }
            }

            // copy current character
            if (!continuation) {
                if (data[i] == '\n') {
                    col = 1;
                    line += 1;
                    // copy comments that were caught up with continuation
                    count = consumeCommentLines(count, newData, comments);
                }
                else {
                    col += 1;
                }
                newData[count++] = data[i];
            }
            else {
                // this line is to be continued
                // put a space instead of the & to keep token start/stop-indexes right
                newData[count++] = ' ';
                col = 1;
            }
        }

        // switch to new data buffer
        this.data = newData;
        this.n = count;
    }

    /**
     * All comments in the middle of continuation lines are moved to a location
     * immediately AFTER the continued line.
     */
    private void convertFixedFormInputBuffer() {
        // an integer "tuple" to hold i, count return values
        int [] index_count;

        // buffer for line comments and preprocessor lines
        StringBuffer comments = new StringBuffer();

        char[] newData = new char[super.n];

        int count = 0;
        int col = 1; // 1 based

        for (int i = 0; i < super.n; i++) {
            int ii;

            // process column 1 special characters
            if (col == 1) {
                while ((ii = consumePreprocessLine(i, data, comments)) != i) {
                    count = consumeCommentLines(count, newData, comments);
                    line += 1;
                    i = ii;
                }
                while ((ii = consumeFixedFormCommentLine(i, data, comments)) != i) {
                    count = consumeCommentLines(count, newData, comments);
                    line += 1;
                    i = ii;
                }
                if (i >= super.n) {
                    // this can occur if last line is a comment line
                    continue;
                }
                // "expand" TABs by bumping to column 5
                if (data[i] == '\t') {
                    col = 5; // column 5 will pick up TAB character
                }
            }
            else if (col > 1 && col < 6) {
                // consume a comment if it exists but retain '\n' or EOF
                if (matchComment(i, data)) {
                    i = consumeComment(i, data, comments);
                    // can't add comments yet if the line is continued
                    if (!matchFixedFormContinuation(i, data)) {
                        count = consumeCommentLines(count, newData, comments);
                    }
                }
            }
            else if (col == 6) {
                // Continuation checked at '\n' so no need to here, just pass the character.
                // If first line is a continuation it is an error so won't need to be
                // caught here. TODO - what about included files with continuation, legal?
                // but I think 0 in column 6 to start is legal (gfortran and ifort disagree)
                if (data[i] == '0') data[i] = ' ';
            }
            else {
                // consume a comment if it exists but retain '\n' or EOF
                if (matchComment(i, data)) {
                    i = consumeComment(i, data, comments);
                    // can't add comments yet if the line is continued
                    if (!matchFixedFormContinuation(i, data)) {
                        count = consumeCommentLines(count, newData, comments);
                    }
                }
                // consume a string if it exists but retain trailing quote char
                else if (matchFixedFormString(i, data)) {
                    ii = consumeFixedFormString(i, data, count, newData);
                    count += ii - i;
                    col += ii - i;
                    i = ii;
                }
                // Holleriths are matched after strings so Hollerith matching doesn't have
                // to worry about strings, i.e, the string, "4HThis is a string".
                else {
                    index_count = consumeHollerith(i, data, count, newData);
                    ii = index_count[0];
                    count = index_count[1];
                    if (ii != i) {
                        col += ii - i;
                        i = ii;
                    }
                }
            }

            while (data[i] == '\n' && matchFixedFormContinuation(i, data)) {
                i = consumeFixedFormContinuation(i, data, comments);
            }

            // copy current character
            newData[count++] = data[i];
            col += 1;
            if (data[i] == '\n') {
                col = 1;
                line += 1;
                // copy comments that were caught up with continuation
                count = consumeCommentLines(count, newData, comments);
            }
        }

        // switch to new data buffer
        this.data = newData;
        this.n = count;
    }

    /**
     * Copy comment line and preprocessor lines to data buffer
     */
    private int consumeCommentLines(int i, char [] newData, StringBuffer comments) {
        for(int ii = 0; ii < comments.length(); ii++) {
            newData[i++] = comments.charAt(ii);
        }
        comments.delete(0, comments.length());
        return i;
    }

    /**
     * Return true if this character starts a comment
     */
    private boolean matchComment(int i, char buf[]) {
        return (buf[i] == '!');
    }

    /**
     * if a comment, copy comment to comments buffer excluding terminating '\n' character
     */
    private int consumeComment(int i, char buf[], StringBuffer comments) {
        if (i < super.n && buf[i] == '!') {
            // found comment character, copy characters up to '\n'
            do {
comments.append(buf[i++]); } while (i < super.n && buf[i] != '\n'); } return i; } /** * Return true if a comment line beginning with '!' is found */ private boolean matchFreeFormCommentLine(int i, char buf[]) { // skip over leading blank characters // TODO - what about TABS int i1 = i; while(i1 < super.n && buf[i1] == ' ') i1 += 1; if (i1 >= super.n) return false; if (buf[i1] == '!' || buf[i1] == '\n') return true; return false; } private int consumeFreeFormCommentLine(int i, char buf[], StringBuffer comments) { if (i >= super.n) return i; // skip over leading blank characters int i1 = i; while(i1 < super.n && buf[i1] == ' ') i1 += 1; // nothing to do if not a comment line if (i1 < super.n && buf[i1] != '!' && buf[i1] != '\n') return i; // copy leading blank characters for (int ii = 0; ii < i1-i; ii++) comments.append(' '); if (i1 == super.n) return super.n; if (buf[i1] == '\n') { // a comment line with only whitespace comments.append('\n'); i = i1+1; } else { i = processLineForCommentChar('!', i1, buf, comments); } return i; } /** * Return true if a fixed form comment line is found. * * Check for comment characters, 'C', '*', and '!' at the start of * a line. A blank line is also a comment line. */ private boolean matchFixedFormCommentLine(int i, char buf[]) { if (i >= super.n) return false; // first check for free form ('!' comments and blank character lines) if (matchFreeFormCommentLine(i, buf)) return true; // check for a normal comment line if (buf[i] == '*' || buf[i] == 'C' || buf[i] == 'c') return true; return false; } /** * Check for comment characters, 'C', '*', and '!' at start of * a line. A blank line is also a comment line. If a comment line is * found, copy the line comment to the comments buffer (without trailing * '\n', and return the position of the character after the '\n' character. */ private int consumeFixedFormCommentLine(int i, char buf[], StringBuffer comments) { if (i >= super.n) return i; // first check for free form ('!' 
comments and blank character lines) int ii = consumeFreeFormCommentLine(i, buf, comments); if (ii != i) return ii; // check for a normal comment line if (buf[i] == '*') ii = processLineForCommentChar('*', i, buf, comments); else if (buf[i] == 'C') ii = processLineForCommentChar('C', i, buf, comments); else if (buf[i] == 'c') ii = processLineForCommentChar('c', i, buf, comments); return ii; } /** * If character at i == c, skip to next line advancing past '\n', while * copying intervening characters to comments buffer. */ private int processLineForCommentChar(char c, int i, char buf[], StringBuffer comments) { if (i >= super.n) return i; if (buf[i] == c) { if (buf[i] == '*' || buf[i] == 'C' || buf[i] == 'c') { // replace by free form comment character comments.append('!'); } else { comments.append(buf[i]); } i += 1; // found character, copy rest of line while ( (i < n) && (buf[i] != '\n') ) { comments.append(buf[i++]); } if (i < n) { comments.append(buf[i++]); // copy '\n' also } } return i; } private boolean matchPreprocessLine(int i, char buf[]) { return (buf[i] == '#'); } private int consumePreprocessLine(int i, char buf[], StringBuffer comments) { return processLineForCommentChar('#', i, buf, comments); } /** * Return true if the current character is '&' */ private boolean matchFreeFormContinuationAtEnd(int i, char buf[]) { return (buf[i] == '&'); } /** * If the current character is '&', skip the '&' character and * copy all remaining characters to the comments buffer, including * '\n', to retain possible comments following the continuation character. 
*/ private int consumeFreeFormContinuationAtEnd(int i, char buf[], StringBuffer comments) { if (i >= super.n || buf[i] != '&') return i; i += 1; // skip the continuation character while (i < super.n && buf[i] != '\n') { comments.append(buf[i++]); } if (i < super.n) { comments.append(buf[i++]); // copy '\n' also } return i-1; // retain the '\n' } /** * Check to see if there is a continuation character as '&' * the first non-blank character in a line. If there is, return * the position after the '&' character. */ private int skipFreeFormContinuationAtBegin(int i, char buf[]) { int ii = i; while (ii < super.n && buf[ii] == ' ' && buf[ii] != '&') ii += 1; if (buf[ii] == '&') i = ii + 1; return i; } /** * Called when at a '\n' character. Look ahead for continuation * character at column 6. There could be comment or preprocess * lines in between so have to skip over comment lines and if * they exist. * * The convention for a TAB character in columns 1..5 followed * by a digit 1..9 is a continuation line. If TAB + '0' the * '0' is ignored in the input stream. This follows DEC convention * (I believe) but is non standard Fortran. * * WARNING, don't go beyond length of stream, super.n */ private boolean matchFixedFormContinuation(int i, char buf[]) { int ii; // skip all preprocessor and comment lines // i += 1; while (matchPreprocessLine(i, buf) || matchFixedFormCommentLine(i, buf)) { i = findCharacter('\n', i, buf); if (buf[i] != '\n') return false; i += 1; } // search for TAB in columns 1..5, otherwise continued position will be ii ii = i; for (int j = 0; j < 5; j++) { if (buf[ii] == '\n') return false; if (buf[ii++] == '\t') { if (buf[ii] >= '1' && buf[ii] <= '9') { return true; } else { return false; } } } if (buf[ii] != '0' && buf[ii] != ' ') { return true; } return false; } /** * Called when at a '\n' character. Look ahead for continuation * character at column 6. 
There could be comment or preprocess * lines in between so have to search for comment lines and remove * them if they exist. * * The convention for a TAB character in columns 1..5 followed * by a digit 1..9 is a continuation line. If TAB + '0' the * '0' is ignored in the input stream. This follows DEC convention * (I believe) but is non standard Fortran. * * WARNING, don't go beyond length of stream, super.n */ private int consumeFixedFormContinuation(int i, char buf[], StringBuffer comments) { int i0 = i; // save initial value of i int ii = i + 1; // look ahead past the '\n' // consume all preprocessor and comment lines // i += 1; if (matchPreprocessLine(i, buf)) { return (consumePreprocessLine(i, buf, comments) - 1); // retain the '\n' } if (matchFixedFormCommentLine(i, buf)) { return (consumeFixedFormCommentLine(i, buf, comments) - 1); // retain the '\n' } // search for TAB in columns 1..5, otherwise continued position will be ii for (int j = 0; j < 5; j++) { if (buf[ii] == '\n') return i0; if (buf[ii++] == '\t') { if (buf[ii] >= '1' && buf[ii] <= '9') { return ii+1; } else { if (i == i0 + 1) return i0; // nothing found else return i-1; // '\n' position from comment line } } } if (buf[ii] != '0' && buf[ii] != ' ') { comments.append('\n'); return ii+1; // a continuation found } // if statement begins after '0', replace '0' with ' ' for parsing if (buf[ii] == '0') { buf[ii] = ' '; } return i0; // nothing found (expect possibly replacing '0' in column 6 } /** * Check for a Hollerith following the current character position. First must * ensure that it's not an identifier, "var_2Hxx", so look for preceding * character, ' ', '(', ',' (and perhaps more). Then look for digit string, n, * followed by 'H'|'h' and then n characters (none of which is \'n'). * Perhaps we want to change Hollerith to a quoted string. In any case, * copy string representation to newBuf. * * We would like to be conservative while matching Hollerith's. 
Examples showing * characters that can precede a Hollerith constant: * " 1Hx", "=1Hx", ".eq.1Hx", "(1Hx", "-1Hx", ",1Hx". Note Hollerith constants * have no data type; they assume a numeric type based on the way they are used. * They cannot assume a character data type and cannot be used where a character * value is expected (from a DEC manual for F77, I believe). Not sure this * applies to Hollerith edit descriptors. * * Assume that strings have been matched so a Hollerith-like constant * within a string doesn't have to been considered. * * Return the last character position in the Hollerith constant if found. */ private int[] consumeHollerith(int i, char buf[], int count, char newBuf[]) { int k; if (i >= super.n) return new int[] {i, count}; // Return i if the first character can be used in a name context, e.g., // "v1H" or "_1H" as this could have been the name "v_1H". A name is // A name is a letter followed by an alphanumeric character // (letter, digit, '_'). // it might be conservative and look for only what CAN precede Hollerith if (buf[i] >= 'a' && buf[i] <= 'z') return new int[] {i, count}; if (buf[i] >= 'A' && buf[i] <= 'Z') return new int[] {i, count}; if (buf[i] == '_') return new int[] {i, count}; // count digits preceding possible Hollerith int ii = i + 1; int numDigits = 0; while (buf[ii] >= '0' && buf[ii] <= '9') { ii += 1; numDigits += 1; } if (numDigits == 0) return new int[] {i, count}; if (buf[ii] != 'H' && buf[ii] != 'h') return new int[] {i, count}; // found Hollerith StringBuffer chars = new StringBuffer(numDigits); for (k = 0; k < numDigits; k++) { chars.append(buf[i+1+k]); } int numChars = Integer.parseInt(chars.toString()); // look for numChars printable characters ii += 1; for (k = 0; k < numChars; k++) { if (buf[ii+k] < ' ' || buf[ii+k] > '-') break; } if (k != numChars) return new int[] {i, count}; // number of characters to copy (includes preceding character) int numTotal = 1 + numDigits + 1 + numChars; // found a Hollerith constant, 
copy all but last character to newBuf for (k = 0; k < numTotal-1; k++) { newBuf[count++] = buf[i+k]; } return new int[] {i+numTotal-1, count}; } /** * Complete the processing of a string that is continued across multiple lines. */ private int[] completeContinuedString(char quoteChar, int i, char buf[], int count, char newBuf[]) { int i0 = i; // skip initial '&' if (++i >= super.n) return new int[] {i0, count}; // skip characters after initial '&' while (i < super.n && buf[i] != '\n') i += 1; // TODO - check for comment i += 1; // skip '\n' // skip ' ' characters on next line // TODO - what about TABS? while (i < super.n && buf[i] == ' ') i += 1; if (i >= super.n) return new int[] {i-1, count}; // skip trailing '&' // // NOTE: gfortran doesn't require the terminating character (warns with -Wall) // so we also ignore the standard here if the trailing '&' is missing // if (buf[i] == '&') i += 1; if (i >= super.n) return new int[] {i-1, count}; do { newBuf[count++] = buf[i++]; // look for two quote chars in a row, if found copy both if (i < super.n - 1 && buf[i] == quoteChar && buf[i+1] == quoteChar) { newBuf[count++] = buf[i++]; newBuf[count++] = buf[i++]; } } while (i < super.n && buf[i] != quoteChar && buf[i] != '&' && buf[i] != '\n'); return new int[] {i, count}; } /** * Check for the beginning of a string at this character position. */ private boolean matchFreeFormString(int i, char buf[]) { if (i >= super.n) return false; char quote_char = buf[i]; if (quote_char == '"' || quote_char == '\'') return true; else return false; } /** * Check for the beginning of a string at this character position. If * found copy the characters of the string into newBuf, except for the * terminating quote character. A string may be continued, if so it * continues after the '&' character on the next line; return the position * of the trailing '&'. If a string doesn't terminate it's an error, * return '\n' position. 
*/ private int[] consumeFreeFormString(int i, char buf[], int count, char newBuf[]) { if (i >= super.n) return new int[] {i,count}; char quote_char = buf[i]; if (quote_char != '"' && quote_char != '\'') { return new int[] {i,count}; // not the start of a string } do { newBuf[count++] = buf[i++]; // look for two quote chars in a row, if found copy both if (i < super.n - 1 && buf[i] == quote_char && buf[i+1] == quote_char) { newBuf[count++] = buf[i++]; newBuf[count++] = buf[i++]; } // look for continuation character as last non-blank character and // if found, return the '&' position so caller can process continuation if (buf[i] == '&') { int ii = i; while (buf[++ii] == ' '); if (buf[ii] != '\n') { // '&' not a continuation, just part of the string, so just copy it newBuf[count++] = buf[i++]; } } } while (i < super.n && buf[i] != quote_char && buf[i] != '&' && buf[i] != '\n'); return new int[] {i,count}; } /** * Check for the beginning of a string at this character position. */ private boolean matchFixedFormString(int i, char buf[]) { return matchFreeFormString(i, buf); } /** * Check for the beginning of a string at this character position. If * found copy the characters of the string into newBuf, except for the * terminating quote character. If a string doesn't terminate it's an error, * return '\n' position. 
*/ private int consumeFixedFormString(int i, char buf[], int count, char newBuf[]) { if (i >= super.n) return i; char quote_char = buf[i]; if (quote_char != '"' && quote_char != '\'') return i; // not the start of a string do { newBuf[count++] = buf[i++]; // look for two quote chars in a row, if found copy both if (i < super.n - 1 && buf[i] == quote_char && buf[i+1] == quote_char) { newBuf[count++] = buf[i++]; newBuf[count++] = buf[i++]; } } while (i < super.n && buf[i] != quote_char && buf[i] != '\n'); return i; } private int findCharacter(char ch, int i, char buf[]) { int i0 = i; while (i < super.n && buf[i] != ch) i += 1; if (buf[i] == ch) return i; else return i0; } } // end class FortranStream
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.persistence;

import java.io.Serializable;
import java.util.Arrays;
import java.util.Objects;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.UnaryOperator;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.ignite.DataRegionMetrics;
import org.apache.ignite.DataStorageMetrics;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheAtomicityMode;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.configuration.BinaryConfiguration;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.configuration.WALMode;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.pagemem.wal.record.DataRecord;
import org.apache.ignite.internal.processors.cache.WalStateManager.WALDisableContext;
import org.apache.ignite.internal.processors.cache.persistence.wal.FileDescriptor;
import org.apache.ignite.internal.processors.metric.MetricRegistry;
import org.apache.ignite.internal.processors.metric.impl.AtomicLongMetric;
import org.apache.ignite.internal.processors.metric.impl.LongAdderMetric;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.PAX;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.mxbean.DataStorageMetricsMXBean;
import org.apache.ignite.spi.metric.HistogramMetric;
import org.apache.ignite.testframework.ListeningTestLogger;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.junit.Test;

import static java.util.Collections.emptyList;
import static org.apache.ignite.cache.CacheAtomicityMode.ATOMIC;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;
import static org.apache.ignite.cluster.ClusterState.ACTIVE;
import static org.apache.ignite.internal.processors.cache.persistence.DataStorageMetricsImpl.DATASTORAGE_METRIC_PREFIX;
import static org.apache.ignite.internal.processors.cache.persistence.wal.serializer.RecordV1Serializer.HEADER_RECORD_SIZE;
import static org.apache.ignite.testframework.GridTestUtils.setFieldValue;
import static org.apache.ignite.testframework.GridTestUtils.waitForCondition;

/**
 * Tests for data storage (persistence) metrics: data region fill factor and
 * dirty pages, checkpoint durations/histograms, and WAL written/compressed
 * byte counters.
 */
public class IgniteDataStorageMetricsSelfTest extends GridCommonAbstractTest {
    /** Cache group name used by the persistent cache. */
    private static final String GROUP1 = "grp1";

    /** Name of the data region configured with persistence disabled. */
    private static final String NO_PERSISTENCE = "no-persistence";

    /** Log wrapper that lets tests listen for checkpoint log messages. */
    private final ListeningTestLogger listeningLog = new ListeningTestLogger(log);

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        cleanPersistenceDir();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);

        cfg.setConsistentId(gridName);

        long maxRegionSize = 20L * 1024 * 1024;

        // Two regions with metrics enabled: the default one persistent,
        // the second one ("no-persistence") purely in-memory.
        DataStorageConfiguration memCfg = new DataStorageConfiguration()
            .setDefaultDataRegionConfiguration(new DataRegionConfiguration()
                .setMaxSize(maxRegionSize)
                .setPersistenceEnabled(true)
                .setMetricsEnabled(true)
                .setName("dflt-plc"))
            .setDataRegionConfigurations(new DataRegionConfiguration()
                .setMaxSize(maxRegionSize)
                .setPersistenceEnabled(false)
                .setMetricsEnabled(true)
                .setName(NO_PERSISTENCE))
            .setWalMode(WALMode.LOG_ONLY)
            .setMetricsEnabled(true);

        cfg.setDataStorageConfiguration(memCfg);

        cfg.setBinaryConfiguration(new BinaryConfiguration().setCompactFooter(false));

        cfg.setCacheConfiguration(
            cacheConfiguration(GROUP1, "cache", PARTITIONED, ATOMIC, 1, null),
            cacheConfiguration(null, "cache-np", PARTITIONED, ATOMIC, 1, NO_PERSISTENCE));

        cfg.setGridLogger(listeningLog);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();

        super.afterTest();
    }

    /**
     * @param grpName Cache group name.
     * @param name Cache name.
     * @param cacheMode Cache mode.
     * @param atomicityMode Atomicity mode.
     * @param backups Backups number.
     * @param dataRegName Data region name ({@code null} for the default region).
     * @return Cache configuration.
     */
    private CacheConfiguration cacheConfiguration(
        String grpName,
        String name,
        CacheMode cacheMode,
        CacheAtomicityMode atomicityMode,
        int backups,
        String dataRegName
    ) {
        CacheConfiguration ccfg = new CacheConfiguration();

        ccfg.setName(name);
        ccfg.setGroupName(grpName);
        ccfg.setAtomicityMode(atomicityMode);
        ccfg.setBackups(backups);
        ccfg.setCacheMode(cacheMode);
        ccfg.setWriteSynchronizationMode(FULL_SYNC);
        ccfg.setDataRegionName(dataRegName);
        ccfg.setAffinity(new RendezvousAffinityFunction(false, 32));

        // Disk page compression is meaningless for the in-memory region.
        if (NO_PERSISTENCE.equals(dataRegName))
            ccfg.setDiskPageCompression(null);

        return ccfg;
    }

    /**
     * Checks basic data region metrics (dirty pages, fill factor, allocated pages)
     * for both the persistent and the in-memory region, and checkpoint page counts.
     *
     * @throws Exception if failed.
     */
    @Test
    public void testPersistenceMetrics() throws Exception {
        final IgniteEx ig = startGrid(0);

        ig.active(true);

        try {
            IgniteCache<Object, Object> cache = ig.cache("cache");

            for (int i = 0; i < 10; i++)
                cache.put(i, new Person("first-" + i, "last-" + i));

            IgniteCache<Object, Object> cacheNp = ig.cache("cache-np");

            for (int i = 0; i < 10; i++)
                cacheNp.put(i, new Person("first-" + i, "last-" + i));

            DataRegionMetrics memMetrics = ig.dataRegionMetrics("dflt-plc");

            assertNotNull(memMetrics);
            assertTrue(memMetrics.getDirtyPages() > 0);
            assertTrue(memMetrics.getPagesFillFactor() > 0);

            memMetrics = ig.dataRegionMetrics("no-persistence");

            assertNotNull(memMetrics);
            assertTrue(memMetrics.getTotalAllocatedPages() > 0);
            assertTrue(memMetrics.getPagesFillFactor() > 0);

            ig.context().cache().context().database().waitForCheckpoint("test");

            assertTrue(waitForCondition(new PAX() {
                @Override public boolean applyx() {
                    DataStorageMetrics pMetrics = ig.dataStorageMetrics();

                    assertNotNull(pMetrics);

                    return pMetrics.getLastCheckpointTotalPagesNumber() != 0 &&
                        pMetrics.getLastCheckpointDataPagesNumber() != 0;
                }
            }, 10_000));
        }
        finally {
            stopAllGrids();
        }
    }

    /**
     * Checks that last-checkpoint duration metrics and checkpoint histograms
     * agree with the values reported in the "Checkpoint started" log message.
     *
     * @throws Exception if failed.
     */
    @Test
    public void testCheckpointMetrics() throws Exception {
        // Parses the "Checkpoint started" log line; group order must match the
        // expLastCp* assignments below.
        Pattern cpPtrn = Pattern.compile("^Checkpoint started .*" +
            "checkpointBeforeLockTime=(\\d+)ms, " +
            "checkpointLockWait=(\\d+)ms, " +
            "checkpointListenersExecuteTime=(\\d+)ms, " +
            "checkpointLockHoldTime=(\\d+)ms, " +
            "walCpRecordFsyncDuration=(\\d+)ms, " +
            "writeCheckpointEntryDuration=(\\d+)ms, " +
            "splitAndSortCpPagesDuration=(\\d+)ms");

        AtomicLong expLastCpBeforeLockDuration = new AtomicLong();
        AtomicLong expLastCpLockWaitDuration = new AtomicLong();
        AtomicLong expLastCpListenersExecuteDuration = new AtomicLong();
        AtomicLong expLastCpLockHoldDuration = new AtomicLong();
        AtomicLong expLastCpWalRecordFsyncDuration = new AtomicLong();
        AtomicLong expLastCpWriteEntryDuration = new AtomicLong();
        AtomicLong expLastCpSplitAndSortPagesDuration = new AtomicLong();
        AtomicInteger cpCnt = new AtomicInteger();

        listeningLog.registerListener(s -> {
            Matcher matcher = cpPtrn.matcher(s);

            if (!matcher.find())
                return;

            expLastCpBeforeLockDuration.set(Long.parseLong(matcher.group(1)));
            expLastCpLockWaitDuration.set(Long.parseLong(matcher.group(2)));
            expLastCpListenersExecuteDuration.set(Long.parseLong(matcher.group(3)));
            expLastCpLockHoldDuration.set(Long.parseLong(matcher.group(4)));
            expLastCpWalRecordFsyncDuration.set(Long.parseLong(matcher.group(5)));
            expLastCpWriteEntryDuration.set(Long.parseLong(matcher.group(6)));
            expLastCpSplitAndSortPagesDuration.set(Long.parseLong(matcher.group(7)));
            cpCnt.incrementAndGet();
        });

        IgniteEx node = startGrid(0);

        node.cluster().state(ACTIVE);

        GridCacheDatabaseSharedManager db =
            (GridCacheDatabaseSharedManager)node.context().cache().context().database();

        // Hold the checkpoint read lock so no new checkpoint can start while
        // the captured values are compared against the metrics.
        db.checkpointReadLock();

        try {
            waitForCondition(() -> cpCnt.get() > 0, getTestTimeout());

            MetricRegistry mreg = node.context().metric().registry(DATASTORAGE_METRIC_PREFIX);

            AtomicLongMetric lastCpBeforeLockDuration = mreg.findMetric("LastCheckpointBeforeLockDuration");
            AtomicLongMetric lastCpLockWaitDuration = mreg.findMetric("LastCheckpointLockWaitDuration");
            AtomicLongMetric lastCpListenersExecuteDuration = mreg.findMetric("LastCheckpointListenersExecuteDuration");
            AtomicLongMetric lastCpLockHoldDuration = mreg.findMetric("LastCheckpointLockHoldDuration");
            AtomicLongMetric lastCpWalRecordFsyncDuration = mreg.findMetric("LastCheckpointWalRecordFsyncDuration");
            AtomicLongMetric lastCpWriteEntryDuration = mreg.findMetric("LastCheckpointWriteEntryDuration");
            AtomicLongMetric lastCpSplitAndSortPagesDuration =
                mreg.findMetric("LastCheckpointSplitAndSortPagesDuration");

            HistogramMetric cpBeforeLockHistogram = mreg.findMetric("CheckpointBeforeLockHistogram");
            HistogramMetric cpLockWaitHistogram = mreg.findMetric("CheckpointLockWaitHistogram");
            HistogramMetric cpListenersExecuteHistogram = mreg.findMetric("CheckpointListenersExecuteHistogram");
            HistogramMetric cpMarkHistogram = mreg.findMetric("CheckpointMarkHistogram");
            HistogramMetric cpLockHoldHistogram = mreg.findMetric("CheckpointLockHoldHistogram");
            HistogramMetric cpPagesWriteHistogram = mreg.findMetric("CheckpointPagesWriteHistogram");
            HistogramMetric cpFsyncHistogram = mreg.findMetric("CheckpointFsyncHistogram");
            HistogramMetric cpWalRecordFsyncHistogram = mreg.findMetric("CheckpointWalRecordFsyncHistogram");
            HistogramMetric cpWriteEntryHistogram = mreg.findMetric("CheckpointWriteEntryHistogram");
            HistogramMetric cpSplitAndSortPagesHistogram = mreg.findMetric("CheckpointSplitAndSortPagesHistogram");
            HistogramMetric cpHistogram = mreg.findMetric("CheckpointHistogram");

            // Histograms are updated asynchronously after the log message appears.
            waitForCondition(() -> cpCnt.get() == Arrays.stream(cpHistogram.value()).sum(), getTestTimeout());

            assertEquals(cpCnt.get(), Arrays.stream(cpBeforeLockHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpLockWaitHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpListenersExecuteHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpMarkHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpLockHoldHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpPagesWriteHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpFsyncHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpWalRecordFsyncHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpWriteEntryHistogram.value()).sum());
            assertEquals(cpCnt.get(), Arrays.stream(cpSplitAndSortPagesHistogram.value()).sum());

            assertEquals(expLastCpBeforeLockDuration.get(), lastCpBeforeLockDuration.value());
            assertEquals(expLastCpLockWaitDuration.get(), lastCpLockWaitDuration.value());
            assertEquals(expLastCpListenersExecuteDuration.get(), lastCpListenersExecuteDuration.value());
            assertEquals(expLastCpLockHoldDuration.get(), lastCpLockHoldDuration.value());
            assertEquals(expLastCpWalRecordFsyncDuration.get(), lastCpWalRecordFsyncDuration.value());
            assertEquals(expLastCpWriteEntryDuration.get(), lastCpWriteEntryDuration.value());
            assertEquals(expLastCpSplitAndSortPagesDuration.get(), lastCpSplitAndSortPagesDuration.value());
        }
        finally {
            db.checkpointReadUnlock();
        }
    }

    /**
     * Checking that the metrics of the total logged bytes are working correctly.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testWalWrittenBytes() throws Exception {
        IgniteEx n = startGrid(0, (UnaryOperator<IgniteConfiguration>)cfg -> {
            cfg.getDataStorageConfiguration().setWalSegmentSize((int)(2 * U.MB));

            return cfg;
        });

        n.cluster().state(ACTIVE);
        awaitPartitionMapExchange();

        for (int i = 0; i < 10; i++)
            n.cache("cache").put(ThreadLocalRandom.current().nextLong(), new byte[(int)(32 * U.KB)]);

        // Disable the WAL so lastWritePointer stops moving while the counters
        // are compared.
        WALDisableContext walDisableCtx = n.context().cache().context().walState().walDisableContext();
        assertNotNull(walDisableCtx);

        setFieldValue(walDisableCtx, "disableWal", true);

        assertTrue(walDisableCtx.check());
        assertNull(walMgr(n).log(new DataRecord(emptyList())));

        assertEquals(-1, walMgr(n).lastArchivedSegment());

        // The segment header record is not accounted in the written-bytes metric.
        long exp = walMgr(n).lastWritePointer().fileOffset() - HEADER_RECORD_SIZE;

        assertEquals(exp, dbMgr(n).persistentStoreMetrics().getWalWrittenBytes());
        assertEquals(exp, dsMetricsMXBean(n).getWalWrittenBytes());
        assertEquals(exp, ((LongAdderMetric)dsMetricRegistry(n).findMetric("WalWrittenBytes")).value());
    }

    /**
     * Checking that the metrics of the total size compressed segment are working correctly.
     *
     * @throws Exception If failed.
     */
    @Test
    public void testWalCompressedBytes() throws Exception {
        IgniteEx n0 = startGrid(0, (UnaryOperator<IgniteConfiguration>)cfg -> {
            cfg.getDataStorageConfiguration().setWalCompactionEnabled(true).setWalSegmentSize((int)(2 * U.MB));

            return cfg;
        });

        n0.cluster().state(ACTIVE);
        awaitPartitionMapExchange();

        // Generate enough WAL to roll over several segments into the archive.
        while (walMgr(n0).lastArchivedSegment() < 3)
            n0.cache("cache").put(ThreadLocalRandom.current().nextLong(), new byte[(int)(32 * U.KB)]);

        waitForCondition(
            () -> walMgr(n0).lastArchivedSegment() == walMgr(n0).lastCompactedSegment(),
            getTestTimeout()
        );

        assertCorrectWalCompressedBytesMetrics(n0);

        stopAllGrids();

        // Restart to verify the metric is restored from the archive on startup.
        IgniteEx n1 = startGrid(0, (UnaryOperator<IgniteConfiguration>)cfg -> {
            cfg.getDataStorageConfiguration().setWalCompactionEnabled(true);

            return cfg;
        });

        n1.cluster().state(ACTIVE);
        awaitPartitionMapExchange();

        assertCorrectWalCompressedBytesMetrics(n1);
    }

    /**
     * Test value object stored in the caches.
     */
    static class Person implements Serializable {
        /** First name; SQL-indexed as part of the "full_name" group index. */
        @GridToStringInclude
        @QuerySqlField(index = true, groups = "full_name")
        private String fName;

        /** Last name; SQL-indexed as part of the "full_name" group index. */
        @GridToStringInclude
        @QuerySqlField(index = true, groups = "full_name")
        private String lName;

        /**
         * @param fName First name.
         * @param lName Last name.
         */
        public Person(String fName, String lName) {
            this.fName = fName;
            this.lName = lName;
        }

        /** {@inheritDoc} */
        @Override public String toString() {
            return S.toString(Person.class, this);
        }

        /** {@inheritDoc} */
        @Override public boolean equals(Object o) {
            if (this == o)
                return true;

            if (o == null || getClass() != o.getClass())
                return false;

            Person person = (Person)o;

            return Objects.equals(fName, person.fName) &&
                Objects.equals(lName, person.lName);
        }

        /** {@inheritDoc} */
        @Override public int hashCode() {
            return Objects.hash(fName, lName);
        }
    }

    /**
     * Getting DATASTORAGE_METRIC_PREFIX metric registry.
     *
     * @param n Node.
     * @return Group of metrics.
     */
    private MetricRegistry dsMetricRegistry(IgniteEx n) {
        return n.context().metric().registry(DATASTORAGE_METRIC_PREFIX);
    }

    /**
     * Getting data storage MXBean.
     *
     * @param n Node.
     * @return MXBean.
     */
    private DataStorageMetricsMXBean dsMetricsMXBean(IgniteEx n) {
        return getMxBean(n.name(), "Persistent Store", "DataStorageMetrics", DataStorageMetricsMXBean.class);
    }

    /**
     * Check that the metric of the total size compressed segment is working correctly.
     * Expected value is the on-disk size of all compressed WAL archive segments.
     *
     * @param n Node.
     */
    private void assertCorrectWalCompressedBytesMetrics(IgniteEx n) {
        long exp = Arrays.stream(walMgr(n).walArchiveFiles()).filter(FileDescriptor::isCompressed)
            .mapToLong(fd -> fd.file().length()).sum();

        assertEquals(exp, dbMgr(n).persistentStoreMetrics().getWalCompressedBytes());
        assertEquals(exp, dsMetricsMXBean(n).getWalCompressedBytes());
        assertEquals(exp, ((LongAdderMetric)dsMetricRegistry(n).findMetric("WalCompressedBytes")).value());
    }
}
package org.littleshoot.proxy.impl;

import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.GenericFutureListener;

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * Coordinates the various steps involved in establishing a connection, such as
 * establishing a socket connection, SSL handshaking, HTTP CONNECT request
 * processing, and so on.
 */
class ConnectionFlow {
    /** Steps still to be executed; polled one at a time by {@link #advance()}. */
    private final Queue<ConnectionFlowStep> steps = new ConcurrentLinkedQueue<ConnectionFlowStep>();

    private final ClientToProxyConnection clientConnection;
    private final ProxyToServerConnection serverConnection;

    /** The step currently executing, or null once the flow has run out of steps. */
    private volatile ConnectionFlowStep currentStep;

    /** Becomes true as soon as any executed step asks to suppress the initial request. */
    private volatile boolean suppressInitialRequest = false;

    private final Object connectLock;

    /**
     * Construct a new {@link ConnectionFlow} for the given client and server
     * connections.
     *
     * @param clientConnection
     *            the connection from the client to this proxy
     * @param serverConnection
     *            the connection from this proxy to the destination server
     * @param connectLock
     *            an object that's shared by {@link ConnectionFlow} and
     *            {@link ProxyToServerConnection} and that is used for
     *            synchronizing the reader and writer threads that are both
     *            involved during the establishing of a connection.
     */
    ConnectionFlow(
            ClientToProxyConnection clientConnection,
            ProxyToServerConnection serverConnection,
            Object connectLock) {
        this.clientConnection = clientConnection;
        this.serverConnection = serverConnection;
        this.connectLock = connectLock;
    }

    /**
     * Add a {@link ConnectionFlowStep} to this flow.
     *
     * @param step
     *            the step to append to the end of the flow
     * @return this flow, for chaining
     */
    ConnectionFlow then(ConnectionFlowStep step) {
        steps.add(step);
        return this;
    }

    /**
     * While we're in the process of connecting, any messages read by the
     * {@link ProxyToServerConnection} are passed to this method, which passes
     * it on to {@link ConnectionFlowStep#read(ConnectionFlow, Object)} for the
     * current {@link ConnectionFlowStep}.
     *
     * @param msg
     *            the message read from the server; silently dropped when no
     *            step is currently executing
     */
    void read(Object msg) {
        if (this.currentStep != null) {
            this.currentStep.read(this, msg);
        }
    }

    /**
     * Starts the connection flow, notifying the {@link ClientToProxyConnection}
     * that we've started.
     */
    void start() {
        clientConnection.serverConnectionFlowStarted(serverConnection);
        advance();
    }

    /**
     * <p>
     * Advances the flow. {@link #advance()} will be called until we're either
     * out of steps, or a step has failed.
     * </p>
     */
    void advance() {
        currentStep = steps.poll();
        if (currentStep == null) {
            succeed();
        } else {
            processCurrentStep();
        }
    }

    /**
     * <p>
     * Process the current {@link ConnectionFlowStep}. With each step, we:
     * </p>
     *
     * <ol>
     * <li>Change the state of the associated {@link ProxyConnection} to the
     * value of {@link ConnectionFlowStep#getState()}</li>
     * <li>Call {@link ConnectionFlowStep#execute()}</li>
     * <li>On completion of the {@link Future} returned by
     * {@link ConnectionFlowStep#execute()}, check the success.</li>
     * <li>If successful, we call back into
     * {@link ConnectionFlowStep#onSuccess(ConnectionFlow)}.</li>
     * <li>If unsuccessful, we call {@link #fail()}, stopping the connection
     * flow</li>
     * </ol>
     */
    private void processCurrentStep() {
        final ProxyConnection connection = currentStep.getConnection();
        final ProxyConnectionLogger LOG = connection.getLOG();

        LOG.debug("Processing connection flow step: {}", currentStep);
        connection.become(currentStep.getState());
        // Once any step requests suppression, it stays suppressed for the whole flow.
        suppressInitialRequest = suppressInitialRequest
                || currentStep.shouldSuppressInitialRequest();

        if (currentStep.shouldExecuteOnEventLoop()) {
            connection.ctx.executor().submit(new Runnable() {
                @Override
                public void run() {
                    doProcessCurrentStep(LOG);
                }
            });
        } else {
            doProcessCurrentStep(LOG);
        }
    }

    /**
     * Does the work of processing the current step, checking the result and
     * handling success/failure.
     *
     * @param LOG
     *            logger of the connection associated with the current step
     */
    @SuppressWarnings("unchecked")
    private void doProcessCurrentStep(final ProxyConnectionLogger LOG) {
        currentStep.execute().addListener(
                new GenericFutureListener<Future<?>>() {
                    @Override
                    public void operationComplete(
                            Future<?> future) throws Exception {
                        synchronized (connectLock) {
                            if (future.isSuccess()) {
                                LOG.debug("ConnectionFlowStep succeeded");
                                currentStep.onSuccess(ConnectionFlow.this);
                            } else {
                                LOG.debug("ConnectionFlowStep failed",
                                        future.cause());
                                fail(future.cause());
                            }
                        }
                    }
                });
    }

    /**
     * Called when the flow is complete and successful. Notifies the
     * {@link ProxyToServerConnection} that we succeeded.
     */
    void succeed() {
        synchronized (connectLock) {
            // currentStep is null at this point (the queue was exhausted).
            serverConnection.getLOG().debug(
                    "Connection flow completed successfully: {}", currentStep);
            serverConnection.connectionSucceeded(!suppressInitialRequest);
            notifyThreadsWaitingForConnection();
        }
    }

    /**
     * Called when the flow fails at some {@link ConnectionFlowStep}.
     * Disconnects the {@link ProxyToServerConnection} and informs the
     * {@link ClientToProxyConnection} that our connection failed.
     *
     * @param cause
     *            the reason for the failure (may be null)
     */
    @SuppressWarnings("unchecked")
    void fail(final Throwable cause) {
        final ConnectionState lastStateBeforeFailure = serverConnection
                .getCurrentState();
        serverConnection.disconnect().addListener(
                new GenericFutureListener() {
                    @Override
                    public void operationComplete(Future future)
                            throws Exception {
                        synchronized (connectLock) {
                            if (!clientConnection.serverConnectionFailed(
                                    serverConnection,
                                    lastStateBeforeFailure,
                                    cause)) {
                                // the connection to the server failed and we are not retrying, so transition to the
                                // DISCONNECTED state
                                serverConnection.become(ConnectionState.DISCONNECTED);

                                // We are not retrying our connection, let anyone waiting for a connection know that we're done
                                notifyThreadsWaitingForConnection();
                            }
                        }
                    }
                });
    }

    /**
     * Like {@link #fail(Throwable)} but with no cause.
     */
    void fail() {
        fail(null);
    }

    /**
     * Once we've finished recording our connection and written our initial
     * request, we can notify anyone who is waiting on the connection that it's
     * okay to proceed.
     */
    private void notifyThreadsWaitingForConnection() {
        // Callers (succeed()/fail() listener) already hold connectLock, as
        // required by Object.notifyAll().
        connectLock.notifyAll();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.converter.jaxb.springboot;

import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * Camel JAXB support
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@ConfigurationProperties(prefix = "camel.dataformat.jaxb")
public class JaxbDataFormatConfiguration {

    /**
     * Package name where your JAXB classes are located.
     */
    private String contextPath;
    /**
     * To validate against an existing schema. You can use the prefix
     * classpath:, file: or http: to specify how the resource should be
     * resolved. You can separate multiple schema files by using the ','
     * character.
     */
    private String schema;
    /**
     * To enable pretty printing output nicely formatted. Is by default false.
     */
    private Boolean prettyPrint = false;
    /**
     * Whether to allow using ObjectFactory classes to create the POJO classes
     * during marshalling. This only applies to POJO classes that have not been
     * annotated with JAXB and providing jaxb.index descriptor files.
     */
    private Boolean objectFactory = false;
    /**
     * Whether to ignore JAXBElement elements - only needed to be set to false
     * in very special use-cases.
     */
    private Boolean ignoreJAXBElement = false;
    /**
     * Whether marshalling must be java objects with JAXB annotations. And if
     * not then it fails. This option can be set to false to relax that, such as
     * when the data is already in XML format.
     */
    private Boolean mustBeJAXBElement = false;
    /**
     * To ignore non-XML characters and replace them with an empty space.
     */
    private Boolean filterNonXmlChars = false;
    /**
     * To overrule and use a specific encoding
     */
    private String encoding;
    /**
     * To turn on marshalling XML fragment trees. By default JAXB looks for the
     * XmlRootElement annotation on the given class to operate on the whole XML
     * tree. This is useful but not always - sometimes generated code does not
     * have an XmlRootElement annotation, sometimes you need to unmarshal only
     * part of the tree. In that case you can use partial unmarshalling. To
     * enable this behaviour you need to set the property partClass. Camel will
     * pass this class to JAXB's unmarshaller.
     */
    private Boolean fragment = false;
    /**
     * Name of class used for fragment parsing. See more details at the fragment
     * option.
     */
    private String partClass;
    /**
     * XML namespace to use for fragment parsing. See more details at the
     * fragment option.
     */
    private String partNamespace;
    /**
     * When marshalling using JAXB or SOAP then the JAXB implementation will
     * automatically assign namespace prefixes such as ns2, ns3, ns4 etc. To
     * control this mapping Camel allows you to refer to a map which contains
     * the desired mapping.
     */
    private String namespacePrefixRef;
    /**
     * To use a custom xml stream writer.
     */
    private String xmlStreamWriterWrapper;
    /**
     * To define the location of the schema
     */
    private String schemaLocation;
    /**
     * To define the location of the namespaceless schema
     */
    private String noNamespaceSchemaLocation;
    /**
     * Refers to a custom java.util.Map to lookup in the registry containing
     * custom JAXB provider properties to be used with the JAXB marshaller.
     */
    private String jaxbProviderProperties;
    /**
     * Whether the data format should set the Content-Type header with the type
     * from the data format, if the data format is capable of doing so. For
     * example application/xml for data formats marshalling to XML, or
     * application/json for data formats marshalling to JSon etc.
     */
    private Boolean contentTypeHeader = false;

    public String getContextPath() {
        return contextPath;
    }

    public void setContextPath(String contextPath) {
        this.contextPath = contextPath;
    }

    public String getSchema() {
        return schema;
    }

    public void setSchema(String schema) {
        this.schema = schema;
    }

    public Boolean getPrettyPrint() {
        return prettyPrint;
    }

    public void setPrettyPrint(Boolean prettyPrint) {
        this.prettyPrint = prettyPrint;
    }

    public Boolean getObjectFactory() {
        return objectFactory;
    }

    public void setObjectFactory(Boolean objectFactory) {
        this.objectFactory = objectFactory;
    }

    public Boolean getIgnoreJAXBElement() {
        return ignoreJAXBElement;
    }

    public void setIgnoreJAXBElement(Boolean ignoreJAXBElement) {
        this.ignoreJAXBElement = ignoreJAXBElement;
    }

    public Boolean getMustBeJAXBElement() {
        return mustBeJAXBElement;
    }

    public void setMustBeJAXBElement(Boolean mustBeJAXBElement) {
        this.mustBeJAXBElement = mustBeJAXBElement;
    }

    public Boolean getFilterNonXmlChars() {
        return filterNonXmlChars;
    }

    public void setFilterNonXmlChars(Boolean filterNonXmlChars) {
        this.filterNonXmlChars = filterNonXmlChars;
    }

    public String getEncoding() {
        return encoding;
    }

    public void setEncoding(String encoding) {
        this.encoding = encoding;
    }

    public Boolean getFragment() {
        return fragment;
    }

    public void setFragment(Boolean fragment) {
        this.fragment = fragment;
    }

    public String getPartClass() {
        return partClass;
    }

    public void setPartClass(String partClass) {
        this.partClass = partClass;
    }

    public String getPartNamespace() {
        return partNamespace;
    }

    public void setPartNamespace(String partNamespace) {
        this.partNamespace = partNamespace;
    }

    public String getNamespacePrefixRef() {
        return namespacePrefixRef;
    }

    public void setNamespacePrefixRef(String namespacePrefixRef) {
        this.namespacePrefixRef = namespacePrefixRef;
    }

    public String getXmlStreamWriterWrapper() {
        return xmlStreamWriterWrapper;
    }

    public void setXmlStreamWriterWrapper(String xmlStreamWriterWrapper) {
        this.xmlStreamWriterWrapper = xmlStreamWriterWrapper;
    }

    public String getSchemaLocation() {
        return schemaLocation;
    }

    public void setSchemaLocation(String schemaLocation) {
        this.schemaLocation = schemaLocation;
    }

    public String getNoNamespaceSchemaLocation() {
        return noNamespaceSchemaLocation;
    }

    public void setNoNamespaceSchemaLocation(String noNamespaceSchemaLocation) {
        this.noNamespaceSchemaLocation = noNamespaceSchemaLocation;
    }

    public String getJaxbProviderProperties() {
        return jaxbProviderProperties;
    }

    public void setJaxbProviderProperties(String jaxbProviderProperties) {
        this.jaxbProviderProperties = jaxbProviderProperties;
    }

    public Boolean getContentTypeHeader() {
        return contentTypeHeader;
    }

    public void setContentTypeHeader(Boolean contentTypeHeader) {
        this.contentTypeHeader = contentTypeHeader;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.as2.api.io;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;

import org.apache.http.MessageConstraintException;
import org.apache.http.config.MessageConstraints;
import org.apache.http.impl.io.HttpTransportMetricsImpl;
import org.apache.http.io.BufferInfo;
import org.apache.http.io.HttpTransportMetrics;
import org.apache.http.io.SessionInputBuffer;
import org.apache.http.protocol.HTTP;
import org.apache.http.util.Args;
import org.apache.http.util.Asserts;
import org.apache.http.util.ByteArrayBuffer;
import org.apache.http.util.CharArrayBuffer;

import static org.apache.camel.util.BufferCaster.cast;

/**
 * A buffered {@link SessionInputBuffer} over an {@link InputStream}, used by the AS2
 * component. In addition to the standard HttpCore buffer contract it records whether
 * the last line returned by {@link #readLine(CharArrayBuffer)} was actually terminated
 * by a line feed (see {@link #isLastLineReadTerminatedByLineFeed()}), and supports a
 * pluggable {@link CharsetDecoder} for decoding line content.
 *
 * <p>Invariant: {@code 0 <= bufferpos <= bufferlen <= buffer.length}; the bytes between
 * {@code bufferpos} (inclusive) and {@code bufferlen} (exclusive) are the unread data.
 * NOTE(review): this class is not thread-safe — confirm callers use it from a single
 * thread.</p>
 */
public class AS2SessionInputBuffer implements SessionInputBuffer, BufferInfo {

    // Transfer metrics updated on every read from the underlying stream.
    private final HttpTransportMetricsImpl metrics;
    // Internal read buffer.
    private final byte[] buffer;
    // Accumulates partial lines that span more than one buffer fill.
    private final ByteArrayBuffer linebuffer;
    // Reads larger than this bypass the buffer and go straight to the stream.
    private final int minChunkLimit;
    // Message constraints (notably max line length) enforced by readLine().
    private final MessageConstraints constraints;

    // Optional decoder for converting line bytes to chars; null means raw append.
    private CharsetDecoder decoder;

    // Underlying stream; must be set via bind() before any read.
    private InputStream instream;
    // Position of the next unread byte in 'buffer'.
    private int bufferpos;
    // End (exclusive) of valid data in 'buffer'.
    private int bufferlen;
    // Scratch char buffer used by appendDecoded(); lazily allocated.
    private CharBuffer cbuf;

    // True when the last readLine() found an LF terminator (as opposed to EOF).
    private boolean lastLineReadTerminatedByLineFeed;

    /**
     * @param metrics       transfer metrics to update (must not be null)
     * @param buffersize    size of the internal buffer (must be positive)
     * @param minChunkLimit threshold above which reads bypass the buffer; negative
     *                      values fall back to the default of 512
     * @param constraints   message constraints; null means {@link MessageConstraints#DEFAULT}
     */
    public AS2SessionInputBuffer(final HttpTransportMetricsImpl metrics, final int buffersize, final int minChunkLimit, MessageConstraints constraints) {
        this.metrics = Args.notNull(metrics, "metrics");
        Args.positive(buffersize, "buffersize");
        this.buffer = new byte[buffersize];
        this.bufferpos = 0;
        this.bufferlen = 0;
        this.minChunkLimit = minChunkLimit >= 0 ? minChunkLimit : 512;
        this.constraints = constraints != null ? constraints : MessageConstraints.DEFAULT;
        this.linebuffer = new ByteArrayBuffer(buffersize);
    }

    /** Convenience constructor: chunk limit equals buffer size, default constraints. */
    public AS2SessionInputBuffer(final HttpTransportMetricsImpl metrics, final int buffersize) {
        this(metrics, buffersize, buffersize, null);
    }

    /** @return the decoder used for line content, or null when none is set. */
    public CharsetDecoder getCharsetDecoder() {
        return decoder;
    }

    /** Sets (or clears, with null) the decoder used when reading lines. */
    public void setCharsetDecoder(CharsetDecoder chardecoder) {
        this.decoder = chardecoder;
    }

    /** Binds this buffer to the given input stream; replaces any previous binding. */
    public void bind(final InputStream instream) {
        this.instream = instream;
    }

    /** @return true when an input stream has been bound. */
    public boolean isBound() {
        return this.instream != null;
    }

    /** @return number of unread bytes currently buffered. */
    @Override
    public int length() {
        return this.bufferlen - this.bufferpos;
    }

    /** @return total capacity of the internal buffer. */
    @Override
    public int capacity() {
        return this.buffer.length;
    }

    /** @return remaining free space in the internal buffer. */
    @Override
    public int available() {
        return capacity() - length();
    }

    /**
     * Compacts the buffer and fills the free space from the underlying stream.
     *
     * @return number of bytes read, or -1 at end of stream
     * @throws IOException if reading from the stream fails
     */
    public int fillBuffer() throws IOException {
        // compact the buffer if necessary
        if (this.bufferpos > 0) {
            final int len = this.bufferlen - this.bufferpos;
            if (len > 0) {
                System.arraycopy(this.buffer, this.bufferpos, this.buffer, 0, len);
            }
            this.bufferpos = 0;
            this.bufferlen = len;
        }
        final int l;
        final int off = this.bufferlen;
        final int len = this.buffer.length - off;
        l = streamRead(this.buffer, off, len);
        if (l == -1) {
            return -1;
        } else {
            this.bufferlen = off + l;
            this.metrics.incrementBytesTransferred(l);
            return l;
        }
    }

    /** @return true when unread bytes remain in the internal buffer. */
    public boolean hasBufferedData() {
        return this.bufferpos < this.bufferlen;
    }

    /**
     * Reads up to {@code len} bytes into {@code b}, draining buffered data first.
     * Reads larger than {@code minChunkLimit} bypass the buffer entirely.
     *
     * @return number of bytes read, -1 at end of stream, or 0 when {@code b} is null
     */
    @Override
    public int read(byte[] b, int off, int len) throws IOException {
        if (b == null) {
            return 0;
        }
        if (hasBufferedData()) {
            final int chunk = Math.min(len, this.bufferlen - this.bufferpos);
            System.arraycopy(this.buffer, this.bufferpos, b, off, chunk);
            this.bufferpos += chunk;
            return chunk;
        }
        // If the remaining capacity is big enough, read directly from the
        // underlying input stream bypassing the buffer.
        if (len > this.minChunkLimit) {
            final int read = streamRead(b, off, len);
            if (read > 0) {
                this.metrics.incrementBytesTransferred(read);
            }
            return read;
        } else {
            // otherwise read to the buffer first
            while (!hasBufferedData()) {
                final int noRead = fillBuffer();
                if (noRead == -1) {
                    return -1;
                }
            }
            final int chunk = Math.min(len, this.bufferlen - this.bufferpos);
            System.arraycopy(this.buffer, this.bufferpos, b, off, chunk);
            this.bufferpos += chunk;
            return chunk;
        }
    }

    /** Reads into the whole array; returns 0 when {@code b} is null. */
    @Override
    public int read(byte[] b) throws IOException {
        if (b == null) {
            return 0;
        }
        return read(b, 0, b.length);
    }

    /** Reads a single byte (0-255), or -1 at end of stream. */
    @Override
    public int read() throws IOException {
        int noRead;
        while (!hasBufferedData()) {
            noRead = fillBuffer();
            if (noRead == -1) {
                return -1;
            }
        }
        return this.buffer[this.bufferpos++] & 0xff;
    }

    /**
     * Reads one line (terminated by LF, with an optional preceding CR) into
     * {@code charbuffer}, enforcing the configured maximum line length.
     *
     * @return number of chars appended, or -1 when end of stream is reached
     *         with no pending data
     * @throws MessageConstraintException if the line exceeds the maximum length
     */
    @Override
    public int readLine(CharArrayBuffer charbuffer) throws IOException {
        Args.notNull(charbuffer, "Char array buffer");
        final int maxLineLen = this.constraints.getMaxLineLength();
        int noRead = 0;
        boolean retry = true;
        this.lastLineReadTerminatedByLineFeed = false;
        while (retry) {
            // attempt to find end of line (LF)
            int pos = -1;
            for (int i = this.bufferpos; i < this.bufferlen; i++) {
                if (this.buffer[i] == HTTP.LF) {
                    pos = i;
                    this.lastLineReadTerminatedByLineFeed = true;
                    break;
                }
            }
            if (maxLineLen > 0) {
                final int currentLen = this.linebuffer.length() + (pos > 0 ? pos : this.bufferlen) - this.bufferpos;
                if (currentLen >= maxLineLen) {
                    throw new MessageConstraintException("Maximum line length limit exceeded");
                }
            }
            if (pos != -1) {
                // end of line found.
                if (this.linebuffer.isEmpty()) {
                    // the entire line is present in the read buffer
                    return lineFromReadBuffer(charbuffer, pos);
                }
                retry = false;
                addBytesToLinebuffer(pos);
            } else {
                // end of line not found
                if (hasBufferedData()) {
                    addBytesToLinebuffer(pos);
                }
                noRead = fillBuffer();
                if (noRead == -1) {
                    // end of stream reached.
                    retry = false;
                }
            }
        }
        if (noRead == -1 && this.linebuffer.isEmpty()) {
            // end of stream reached with no further data in line buffer
            return -1;
        }
        return lineFromLineBuffer(charbuffer);
    }

    /** Reads one line as a String, or null at end of stream. */
    @Override
    public String readLine() throws IOException {
        final CharArrayBuffer charbuffer = new CharArrayBuffer(64);
        final int l = readLine(charbuffer);
        if (l != -1) {
            return charbuffer.toString();
        } else {
            return null;
        }
    }

    /** @return true when the last readLine() ended on an LF rather than EOF. */
    public boolean isLastLineReadTerminatedByLineFeed() {
        return lastLineReadTerminatedByLineFeed;
    }

    /** Ignores the timeout; only reports on already-buffered data. */
    @Override
    public boolean isDataAvailable(int timeout) throws IOException {
        return hasBufferedData();
    }

    @Override
    public HttpTransportMetrics getMetrics() {
        return this.metrics;
    }

    /** Reads from the bound stream; fails fast if bind() was never called. */
    private int streamRead(final byte[] b, final int off, final int len) throws IOException {
        Asserts.notNull(this.instream, "Input stream");
        return this.instream.read(b, off, len);
    }

    /**
     * Appends the accumulated line (stripped of its CR/LF terminator) to
     * {@code charbuffer}, decoding it when a decoder is configured.
     */
    private int lineFromLineBuffer(final CharArrayBuffer charbuffer) throws IOException {
        // discard LF if found
        int len = this.linebuffer.length();
        if (len > 0) {
            if (this.linebuffer.byteAt(len - 1) == HTTP.LF) {
                len--;
            }
            // discard CR if found
            if (len > 0) {
                if (this.linebuffer.byteAt(len - 1) == HTTP.CR) {
                    len--;
                }
            }
        }
        if (this.decoder == null) {
            charbuffer.append(this.linebuffer, 0, len);
        } else {
            final ByteBuffer bbuf = ByteBuffer.wrap(this.linebuffer.buffer(), 0, len);
            len = appendDecoded(charbuffer, bbuf);
        }
        this.linebuffer.clear();
        return len;
    }

    /**
     * Fast path for a line wholly contained in the read buffer: appends bytes
     * [bufferpos, position) minus a trailing CR, and advances past the LF.
     */
    private int lineFromReadBuffer(final CharArrayBuffer charbuffer, final int position) throws IOException {
        int pos = position;
        final int off = this.bufferpos;
        int len;
        this.bufferpos = pos + 1;
        if (pos > off && this.buffer[pos - 1] == HTTP.CR) {
            // skip CR if found
            pos--;
        }
        len = pos - off;
        if (this.decoder == null) {
            charbuffer.append(this.buffer, off, len);
        } else {
            final ByteBuffer bbuf = ByteBuffer.wrap(this.buffer, off, len);
            len = appendDecoded(charbuffer, bbuf);
        }
        return len;
    }

    /**
     * Decodes all bytes of {@code bbuf} into {@code charbuffer} using the
     * configured decoder; returns the number of chars produced.
     */
    private int appendDecoded(final CharArrayBuffer charbuffer, final ByteBuffer bbuf) throws IOException {
        if (!bbuf.hasRemaining()) {
            return 0;
        }
        if (this.cbuf == null) {
            this.cbuf = CharBuffer.allocate(1024);
        }
        this.decoder.reset();
        int len = 0;
        while (bbuf.hasRemaining()) {
            final CoderResult result = this.decoder.decode(bbuf, this.cbuf, true);
            len += handleDecodingResult(result, charbuffer, bbuf);
        }
        // Flush any state the decoder buffered internally.
        final CoderResult result = this.decoder.flush(this.cbuf);
        len += handleDecodingResult(result, charbuffer, bbuf);
        cast(this.cbuf).clear();
        return len;
    }

    /**
     * Drains the chars produced by one decode step into {@code charbuffer};
     * propagates malformed-input/unmappable-character errors as exceptions.
     */
    private int handleDecodingResult(final CoderResult result, final CharArrayBuffer charbuffer, final ByteBuffer bbuf) throws IOException {
        if (result.isError()) {
            result.throwException();
        }
        cast(this.cbuf).flip();
        final int len = this.cbuf.remaining();
        while (this.cbuf.hasRemaining()) {
            charbuffer.append(this.cbuf.get());
        }
        this.cbuf.compact();
        return len;
    }

    /**
     * Moves buffered bytes into the line buffer: up to and including position
     * {@code pos} when an LF was found, otherwise everything remaining.
     */
    private void addBytesToLinebuffer(int pos) throws IOException {
        try {
            int len;
            if (pos != -1) {
                len = pos + 1 - this.bufferpos;
            } else {
                len = this.bufferlen - this.bufferpos;
            }
            this.linebuffer.append(this.buffer, this.bufferpos, len);
            if (pos != -1) {
                this.bufferpos = pos + 1;
            } else {
                this.bufferpos = this.bufferlen;
            }
        } catch (Exception e) {
            throw new IOException("failed to decode transfer encoding", e);
        }
    }
}
package sg.ncl.service.authentication.logic;

import freemarker.template.Template;
import freemarker.template.TemplateException;
import io.jsonwebtoken.Claims;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.RandomStringUtils;
import org.json.JSONObject;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.ui.freemarker.FreeMarkerTemplateUtils;
import sg.ncl.adapter.deterlab.AdapterDeterLab;
import sg.ncl.common.DomainProperties;
import sg.ncl.common.authentication.Role;
import sg.ncl.service.authentication.data.jpa.CredentialsEntity;
import sg.ncl.service.authentication.data.jpa.CredentialsRepository;
import sg.ncl.service.authentication.data.jpa.PasswordResetRequestEntity;
import sg.ncl.service.authentication.data.jpa.PasswordResetRequestRepository;
import sg.ncl.service.authentication.domain.Credentials;
import sg.ncl.service.authentication.domain.CredentialsService;
import sg.ncl.service.authentication.domain.CredentialsStatus;
import sg.ncl.service.authentication.exceptions.*;
import sg.ncl.service.mail.domain.MailService;

import javax.inject.Inject;
import javax.inject.Named;
import javax.transaction.Transactional;
import javax.validation.constraints.NotNull;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.stream.Collectors;

import static sg.ncl.service.authentication.validation.Validator.*;

/**
 * Service managing login credentials: CRUD, role assignment, and the
 * email-based password-reset workflow. Password changes are mirrored to the
 * DeterLab testbed via {@link AdapterDeterLab}.
 *
 * @author Christopher Zhong
 */
@Service
@Slf4j
public class CredentialsServiceImpl implements CredentialsService {

    // How long (in hours) a password reset request stays valid.
    private static final int PASSWORD_RESET_REQUEST_TIMEOUT_HOUR = 72;
    // Sender address for all password-reset emails.
    private static final String TESTBED_OPS_EMAIL = "NCL Operations <testbed-ops@ncl.sg>";

    private final CredentialsRepository credentialsRepository;
    private final PasswordEncoder passwordEncoder;
    private final AdapterDeterLab adapterDeterLab;
    private final MailService mailService;
    private final DomainProperties domainProperties;
    private final Template passwordResetEmailTemplate;
    private final Template studentResetPasswordTemplate;
    private final PasswordResetRequestRepository passwordResetRepository;

    @Inject
    CredentialsServiceImpl(
            @NotNull final CredentialsRepository credentialsRepository,
            @NotNull final PasswordEncoder passwordEncoder,
            @NotNull final AdapterDeterLab adapterDeterLab,
            @NotNull final MailService mailService,
            @NotNull final DomainProperties domainProperties,
            @NotNull final PasswordResetRequestRepository passwordResetRepository,
            @NotNull @Named("passwordResetEmailTemplate") final Template passwordResetEmailTemplate,
            @NotNull @Named("studentResetPasswordTemplate") final Template studentResetPasswordTemplate) {
        this.credentialsRepository = credentialsRepository;
        this.passwordEncoder = passwordEncoder;
        this.adapterDeterLab = adapterDeterLab;
        this.mailService = mailService;
        this.domainProperties = domainProperties;
        this.passwordResetRepository = passwordResetRepository;
        this.passwordResetEmailTemplate = passwordResetEmailTemplate;
        this.studentResetPasswordTemplate = studentResetPasswordTemplate;
    }

    /** Returns all stored credentials as a new list. */
    @Transactional
    @Override
    public List<Credentials> getAll() {
        return credentialsRepository.findAll().stream().collect(Collectors.toList());
    }

    /**
     * Creates credentials with an encoded password, ACTIVE status and the USER
     * role. Fails when the user id or username is already taken.
     */
    @Transactional
    @Override
    public Credentials addCredentials(@NotNull final Credentials credentials) {
        addCheck(credentials);
        // check if the user id already exists
        if (credentialsRepository.findOne(credentials.getId()) == null) {
            // check if the username already exists
            if (credentialsRepository.findByUsername(credentials.getUsername()) == null) {
                final CredentialsEntity entity = new CredentialsEntity();
                entity.setId(credentials.getId());
                entity.setUsername(credentials.getUsername());
                hashPassword(entity, credentials.getPassword());
                entity.setStatus(CredentialsStatus.ACTIVE);
                entity.addRole(Role.USER);
                final CredentialsEntity saved = credentialsRepository.save(entity);
                log.info("Credentials created for {}", saved.getId());
                return saved;
            }
            log.warn("Username '{}' is already associated with a credentials", credentials.getUsername());
            throw new UsernameAlreadyExistsException(credentials.getUsername());
        }
        log.warn("User Id '{}' is already associated with a credentials", credentials.getId());
        throw new UserIdAlreadyExistsException(credentials.getId());
    }

    /**
     * Updates username and/or password for the given id; a changed password is
     * also propagated to DeterLab. Blank fields are left untouched.
     */
    @Transactional
    @Override
    public Credentials updateCredentials(@NotNull final String id, @NotNull final Credentials credentials, @NotNull final Claims claims) {
        updateCheck(id, credentials, claims);
        final CredentialsEntity entity = findCredentials(id);
        if (credentials.getUsername() != null && !credentials.getUsername().isEmpty()) {
            entity.setUsername(credentials.getUsername());
        }
        if (credentials.getPassword() != null && !credentials.getPassword().isEmpty()) {
            hashPassword(entity, credentials.getPassword());
            changePassword(id, credentials.getPassword());
        }
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Credentials updated: {}", saved);
        return saved;
    }

    /** Strips all roles from the credentials, then deletes the entity. */
    @Transactional
    @Override
    public void removeCredentials(@NotNull final String id) {
        CredentialsEntity entity = findCredentials(id);
        // Copy the role set first to avoid mutating the collection while iterating it.
        Set<Role> roles = new HashSet<>(entity.getRoles());
        roles.forEach(entity::removeRole);
        credentialsRepository.save(entity);
        credentialsRepository.delete(entity);
    }

    /** Replaces the username of the credentials identified by {@code id}. */
    @Transactional
    @Override
    public Credentials updateUsername(@NotNull final String id, @NotNull final Credentials credentials) {
        checkUsername(credentials);
        final CredentialsEntity entity = findCredentials(id);
        entity.setUsername(credentials.getUsername());
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Username updated: {}", saved);
        return saved;
    }

    /**
     * Replaces the password of the credentials identified by {@code id} and
     * mirrors the change to DeterLab.
     */
    @Transactional
    @Override
    public Credentials updatePassword(@NotNull final String id, @NotNull final Credentials credentials) {
        checkPassword(credentials);
        final CredentialsEntity entity = findCredentials(id);
        hashPassword(entity, credentials.getPassword());
        // NOTE(review): uses credentials.getId() here whereas updateCredentials()
        // passes the id parameter — confirm callers always populate
        // credentials.getId() with the same value as id, otherwise the DeterLab
        // update targets the wrong (or a null) user.
        changePassword(credentials.getId(), credentials.getPassword());
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Password updated: {}", saved);
        return saved;
    }

    /** Sets the status (e.g. ACTIVE/INACTIVE) of the credentials. */
    @Transactional
    @Override
    public Credentials updateStatus(@NotNull final String id, @NotNull final Credentials credentials) {
        checkStatus(credentials);
        final CredentialsEntity entity = findCredentials(id);
        entity.setStatus(credentials.getStatus());
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Status updated: {}", saved);
        return saved;
    }

    /** Adds every role carried by {@code credentials} to the stored entity. */
    @Transactional
    @Override
    public Credentials addRoles(@NotNull final String id, @NotNull final Credentials credentials) {
        checkRoles(credentials);
        final CredentialsEntity entity = findCredentials(id);
        credentials.getRoles().forEach(entity::addRole);
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Roles added: {}", saved);
        return saved;
    }

    /** Removes every role carried by {@code credentials} from the stored entity. */
    @Transactional
    @Override
    public Credentials removeRoles(@NotNull final String id, @NotNull final Credentials credentials) {
        checkRoles(credentials);
        final CredentialsEntity entity = findCredentials(id);
        credentials.getRoles().forEach(entity::removeRole);
        final CredentialsEntity saved = credentialsRepository.save(entity);
        log.info("Roles removed: {}", saved);
        return saved;
    }

    /**
     * Looks up credentials by id.
     *
     * @throws CredentialsNotFoundException when no entity exists for {@code id}
     */
    private CredentialsEntity findCredentials(final String id) {
        final CredentialsEntity entity = credentialsRepository.findOne(id);
        // check if the username exists
        if (entity == null) {
            log.warn("Credentials for '{}' not found", id);
            throw new CredentialsNotFoundException(id);
        }
        return entity;
    }

    /** Stores the password on the entity in encoded (hashed) form. */
    private void hashPassword(final CredentialsEntity entity, final String password) {
        entity.setPassword(passwordEncoder.encode(password));
    }

    /**
     * Invokes the change password on Deterlab
     *
     * @param nclUserId the ncl UUID
     * @param password the clear password
     */
    private void changePassword(String nclUserId, String password) {
        JSONObject adapterObject = new JSONObject();
        // FIXME: need to handle error when getDeterUserIdByNclUserId() returns nothing
        adapterObject.put("uid", adapterDeterLab.getDeterUserIdByNclUserId(nclUserId));
        adapterObject.put("password1", password);
        adapterObject.put("password2", password);
        log.info("Credentials to be updated on Deter on ncl user id: {}", nclUserId);
        adapterDeterLab.updateCredentials(adapterObject.toString());
    }

    /**
     * Invokes the reset password on Deterlab
     *
     * @param nclUserId the ncl UUID
     * @param password the new password
     */
    private void resetPassword(String nclUserId, String password) {
        JSONObject adapterObject = new JSONObject();
        // FIXME: need to handle error when getDeterUserIdByNclUserId() returns nothing
        final String uid = adapterDeterLab.getDeterUserIdByNclUserId(nclUserId);
        adapterObject.put("uid", uid);
        adapterObject.put("password", password);
        adapterDeterLab.resetPassword(adapterObject.toString());
        log.info("Password was reset for Deter user {}", uid);
    }

    /**
     * Creates a password reset request for the given user and emails them a
     * one-time key. Only the SHA-256 hash of the key is persisted.
     *
     * @param jsonString {
     *                   "username": "abc@edf.com"
     *                   }
     */
    @Override
    public void addPasswordResetRequest(String jsonString) {
        final String username = new JSONObject(jsonString).getString("username");
        if (null == username || username.trim().isEmpty()) {
            log.warn("Username null or empty in password reset request");
            throw new UsernameNullOrEmptyException();
        }
        final Credentials one = credentialsRepository.findByUsername(username);
        if (null == one) {
            log.warn("User {} not found in password reset request", username);
            throw new CredentialsNotFoundException(username);
        }
        // The clear key goes to the user by email; only its hash is stored.
        String key = RandomStringUtils.randomAlphanumeric(20);
        PasswordResetRequestEntity passwordResetRequestEntity = new PasswordResetRequestEntity();
        passwordResetRequestEntity.setHash(generateShaHash(key));
        passwordResetRequestEntity.setTime(ZonedDateTime.now());
        passwordResetRequestEntity.setUsername(username);
        passwordResetRepository.save(passwordResetRequestEntity);
        log.info("Password reset request saved: {}", passwordResetRequestEntity.getId());
        sendPasswordResetEmail(username, key);
    }

    /**
     * Emails the password-reset link (containing the clear key) to the user.
     *
     * @param username the email address
     * @param key the random string before hash
     */
    private void sendPasswordResetEmail(String username, String key) {
        final Map<String, String> map = new HashMap<>();
        map.put("username", username);
        map.put("domain", domainProperties.getDomain());
        map.put("key", key);
        /*
         * If sending email fails, we catch the exceptions and log them,
         * rather than throw the exceptions. Hence, the email will not cause
         * the main application to fail. If users cannot receive emails after
         * a certain amount of time, they should send email to support@ncl.sg
         */
        try {
            String msgText = FreeMarkerTemplateUtils.processTemplateIntoString(passwordResetEmailTemplate, map);
            mailService.send(TESTBED_OPS_EMAIL, username, "Your Request To Reset Password", msgText, false, null, null);
            log.info("Password reset email sent: {}", msgText);
        } catch (IOException | TemplateException e) {
            log.warn("{}", e);
        }
    }

    /**
     * Verify whether the password reset request timeout or not
     *
     * @param key the random string before hash
     *
     * @return credentialsEntity for the user who requests to reset password
     */
    private CredentialsEntity verifyPasswordResetRequestTimeout(String key) {
        final String hashedId = generateShaHash(key);
        PasswordResetRequestEntity one = passwordResetRepository.findByHash(hashedId);
        if (null == one) {
            log.warn("Password reset request NOT found {}", key);
            throw new PasswordResetRequestNotFoundException(key);
        }
        // check whether the request has timed out or not
        ZonedDateTime now = ZonedDateTime.now();
        if (now.isAfter(one.getTime().plusHours(PASSWORD_RESET_REQUEST_TIMEOUT_HOUR))) {
            log.warn("Password reset request timeout: request date {}, now {}", one.getTime(), now);
            throw new PasswordResetRequestTimeoutException("requested on " + one.getTime() + ", now " + now);
        }
        return credentialsRepository.findByUsername(one.getUsername());
    }

    /**
     * Reset password
     *
     * @param jsonString {
     *                   "key": "1234abcd5678efgh",
     *                   "new": "password"
     *                   }
     *
     * @return credentialsEntity after resetting password
     */
    @Override
    @Transactional
    public Credentials resetPassword(final String jsonString) {
        final JSONObject tmp = new JSONObject(jsonString);
        final String key = tmp.getString("key");
        final String newPassword = tmp.getString("new");
        CredentialsEntity one = verifyPasswordResetRequestTimeout(key);
        if (null == one) {
            log.warn("Credentials not found for password reset request {}", key);
            throw new CredentialsNotFoundException("Password reset request " + key);
        }
        if (newPassword != null && !newPassword.trim().isEmpty()) {
            hashPassword(one, newPassword);
            resetPassword(one.getId(), newPassword);
            final CredentialsEntity saved = credentialsRepository.save(one);
            log.info("Password was reset for user {}", one.getUsername());
            return saved;
        }
        log.warn("Password null or empty in password reset!");
        throw new PasswordNullOrEmptyException();
    }

    /** Checks a clear-text password against the stored encoded password. */
    @Override
    public boolean verifyPassword(@NotNull final String id, @NotNull final String password) {
        CredentialsEntity entity = findCredentials(id);
        return passwordEncoder.matches(password, entity.getPassword());
    }

    /**
     * SHA-256 hash of {@code str} as lowercase hex. Falls back to
     * {@code String.hashCode()} if SHA-256 is unavailable (should not happen
     * on a standard JRE).
     */
    private static String generateShaHash(String str) {
        MessageDigest md = null;
        try {
            md = MessageDigest.getInstance("SHA-256");
        } catch (NoSuchAlgorithmException e) {
            log.warn("Error in getting message digest method {}", e);
            return String.valueOf(str.hashCode());
        }
        md.update(str.getBytes());
        byte byteData[] = md.digest();
        //convert the byte to hex format method 1
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < byteData.length; i++) {
            // +0x100 then substring(1) pads each byte to exactly two hex digits.
            sb.append(Integer.toString((byteData[i] & 0xff) + 0x100, 16).substring(1));
        }
        return sb.toString();
    }

    /**
     * used when a project leader tries to add students into his project
     * a password reset request will be created for the student who is then informed via email
     *
     * @param userName email address used to login to the web portal
     * @param projectName name of the project/team in which the member is added
     */
    @Override
    @Transactional
    public void addPasswordResetRequestForStudent(String
userName, String projectName){ if (null == userName || userName.trim().isEmpty()) { log.warn("Username null or empty in password reset request"); throw new UsernameNullOrEmptyException(); } final Credentials one = credentialsRepository.findByUsername(userName); if (null == one) { log.warn("User {} not found in credentials database", userName); throw new CredentialsNotFoundException(userName); } String key = RandomStringUtils.randomAlphanumeric(20); PasswordResetRequestEntity passwordResetRequestEntity = new PasswordResetRequestEntity(); passwordResetRequestEntity.setHash(generateShaHash(key)); passwordResetRequestEntity.setTime(ZonedDateTime.now()); passwordResetRequestEntity.setUsername(userName); passwordResetRepository.save(passwordResetRequestEntity); log.info("Password reset request saved: {}", passwordResetRequestEntity.getId()); final String message = "You have been added to a NCL Project (Name: " + projectName + ")."; sendPasswordResetEmailToStudent(one.getId(), key, userName, message); } private void sendPasswordResetEmailToStudent(String uid, String key, String email, String message) { final Map<String, String> map = new HashMap<>(); map.put("member", email); map.put("message", message); map.put("domain", domainProperties.getDomain()); map.put("key", key); map.put("uid", uid); try { String msgText = FreeMarkerTemplateUtils.processTemplateIntoString(studentResetPasswordTemplate, map); mailService.send(TESTBED_OPS_EMAIL, email, "Reset Password For New Student Member", msgText, false, null, null); //log.info("Password reset email sent: {}", msgText); } catch (IOException | TemplateException e) { log.warn("{}", e); } } /** * New student member needs to set/reset his password when he first login * * @param uid uuid of the new student member * @param key the randomly generated key for reset the password * @param password new password */ @Override @Transactional public void changePasswordStudent(String uid, String key, String password){ CredentialsEntity 
credentialFromUid = credentialsRepository.findById(uid); if(null == credentialFromUid) { log.warn("Student password reset: credentials from {} not found", uid); throw new CredentialsNotFoundException(uid); } CredentialsEntity credentialFromKey = verifyPasswordResetRequestTimeout(key); if (!credentialFromKey.getId().equals(uid)) { log.warn("Student password reset: uid {} and key {} do not match", uid, key); throw new PasswordResetRequestNotMatchException("Password reset request does not match with the user!"); } if (password != null && !password.trim().isEmpty()) { hashPassword(credentialFromUid, password); credentialsRepository.save(credentialFromUid); log.info("Student password reset for {} is successful", credentialFromUid.getUsername()); } else { log.warn("Student password reset for {}: Password null or empty!", credentialFromUid.getUsername() ); throw new PasswordNullOrEmptyException(); } } @Override public void resetKeyStudent(String uid) { CredentialsEntity credentialFromUid = credentialsRepository.findById(uid); if(null == credentialFromUid) { log.warn("Student password key reset: credential from {} not found", uid); throw new CredentialsNotFoundException(uid); } String username = credentialFromUid.getUsername(); List<PasswordResetRequestEntity> passwordResetRequestEntityList = passwordResetRepository.findByUsername(username); if(passwordResetRequestEntityList.isEmpty()) { log.warn("Student password key reset: password reset request for {} not found", uid); throw new PasswordResetRequestNotFoundException(uid); } String key = RandomStringUtils.randomAlphanumeric(20); PasswordResetRequestEntity passwordResetRequestEntity = new PasswordResetRequestEntity(); passwordResetRequestEntity.setHash(generateShaHash(key)); passwordResetRequestEntity.setTime(ZonedDateTime.now()); passwordResetRequestEntity.setUsername(username); PasswordResetRequestEntity saved = passwordResetRepository.save(passwordResetRequestEntity); log.info("New password key was generated for {}: 
{}", uid, saved.getId()); sendPasswordResetEmailToStudent(uid, key, username, "Your key has been reset!"); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.axis2.wsdl.util;

import org.apache.axis2.Constants;
import org.apache.axis2.description.Parameter;
import org.apache.axis2.engine.AxisConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import javax.wsdl.Binding;
import javax.wsdl.BindingFault;
import javax.wsdl.BindingInput;
import javax.wsdl.BindingOperation;
import javax.wsdl.BindingOutput;
import javax.wsdl.Definition;
import javax.wsdl.Fault;
import javax.wsdl.Import;
import javax.wsdl.Input;
import javax.wsdl.Message;
import javax.wsdl.Operation;
import javax.wsdl.Output;
import javax.wsdl.Part;
import javax.wsdl.Port;
import javax.wsdl.PortType;
import javax.wsdl.Service;
import javax.wsdl.Types;
import javax.wsdl.extensions.ExtensibilityElement;
import javax.wsdl.extensions.ExtensionRegistry;
import javax.xml.namespace.QName;
import java.net.URL;
import java.util.List;
import java.util.Map;

/**
 * There are cases where a WSDL definition is kept in memory
 * in case it needs to be accessed during runtime. In situations where
 * there are lots of WSDL definitions or big WSDLs, the memory footprint can be
 * huge.
 *
 * This class provides support for processing a WSDL4J definition
 * with a lower memory footprint. This is useful for certain
 * environments.
 *
 * This class makes the decision on which implementation to use
 * to reduce memory footprint. This allows other implementations
 * to be used for specific environments without adding lots of extra
 * overhead to every environment.
 */
public class WSDLDefinitionWrapper implements Definition {

    private static final Log log = LogFactory.getLog(WSDLDefinitionWrapper.class);
    private static final boolean isDebugEnabled = log.isDebugEnabled();
    private static final String myClassName = "WSDLDefinitionWrapper";

    // javax.wsdl.Definition implements java.io.Serializable
    static final long serialVersionUID = -2788807375814097409L;

    // the setting used to indicate whether the in-memory copy of the
    // WSDL definition should be manipulated to reduce memory footprint
    private boolean reduceWSDLMemoryCache = false;

    // the optional setting used to specify which type of reduction to use
    private int reduceWSDLMemoryType = 0;

    // the wrapper implementation to use (chosen in prepare())
    private WSDLWrapperImpl wrapperImpl = null;

    //-------------------------------------------------------------------------
    // constructors
    //-------------------------------------------------------------------------

    /**
     * Constructor
     *
     * @param def The WSDL Definition
     * @deprecated because this constructor does not provide any guidance for
     *             memory usage
     */
    public WSDLDefinitionWrapper(Definition def) {
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition) entry");
        }
        prepare(def, null);
    }

    /**
     * @param def        The WSDL Definition
     * @param limitMemory true if you want to use a memory sensitive wrapper
     * @param memoryType which reduction technique to use
     */
    public WSDLDefinitionWrapper(Definition def, boolean limitMemory, int memoryType) {
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition, boolean) entry");
        }
        reduceWSDLMemoryCache = limitMemory;
        reduceWSDLMemoryType = memoryType;
        prepare(def, null);
    }

    /**
     * @param def        WSDL Definition
     * @param axisConfig Axis Configuration
     */
    public WSDLDefinitionWrapper(Definition def, AxisConfiguration axisConfig) {
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition,AxisConfiguration) entry ");
        }
        setupMemoryParms(axisConfig);
        prepare(def, null);
    }

    /**
     * Constructor
     *
     * @param def  The WSDL Definition
     * @param wURL The URL for the wsdl
     * @deprecated use a constructor with a AxisConfiguration or memory limit parameter
     */
    public WSDLDefinitionWrapper(Definition def, URL wURL) {
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition,URL) entry");
        }
        prepare(def, wURL);
    }

    /**
     * Constructor
     *
     * @param def           The WSDL Definition
     * @param wURL          The URL for the wsdl
     * @param limitInMemory The setting indicating whether the in-memory WSDL copy
     *                      should be manipulated to reduce memory footprint
     */
    public WSDLDefinitionWrapper(Definition def, URL wURL, boolean limitInMemory) {
        reduceWSDLMemoryCache = limitInMemory;
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition,URL,boolean) entry");
        }
        prepare(def, wURL);
    }

    /**
     * Constructor
     *
     * @param def           The WSDL Definition
     * @param wURL          The URL for the wsdl
     * @param limitInMemory The setting indicating whether the in-memory WSDL copy
     *                      should be manipulated to reduce memory footprint
     * @param memoryType    which reduction technique to use
     */
    public WSDLDefinitionWrapper(Definition def, URL wURL, boolean limitInMemory, int memoryType) {
        reduceWSDLMemoryCache = limitInMemory;
        this.reduceWSDLMemoryType = memoryType;
        if (log.isDebugEnabled()) {
            // fixed: previously logged the (Definition,URL,boolean) signature
            log.debug("WSDLDefinitionWrapper(Definition,URL,boolean,int) entry");
        }
        prepare(def, wURL);
    }

    /**
     * Constructor
     *
     * @param def       The WSDL Definition
     * @param wURL      The URL for the wsdl
     * @param limitType The setting indicating which reduction technique to use
     */
    public WSDLDefinitionWrapper(Definition def, URL wURL, int limitType) {
        reduceWSDLMemoryCache = true;
        reduceWSDLMemoryType = limitType;
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition,URL,int) entry");
        }
        prepare(def, wURL);
    }

    /**
     * Constructor
     *
     * @param def     The WSDL Definition
     * @param wURL    The URL for the wsdl
     * @param axisCfg The AxisConfiguration object, to be used to get configuration settings
     */
    public WSDLDefinitionWrapper(Definition def, URL wURL, AxisConfiguration axisCfg) {
        if (log.isDebugEnabled()) {
            log.debug("WSDLDefinitionWrapper(Definition,URL,AxisConfiguration) entry");
        }
        // determine what the setting for the memory optimization is
        setupMemoryParms(axisCfg);
        prepare(def, wURL);
    }

    /**
     * Reads the memory-reduction settings from the Axis configuration into
     * {@link #reduceWSDLMemoryCache} and {@link #reduceWSDLMemoryType}.
     *
     * @param axisCfg the Axis configuration; may be null, in which case the
     *                defaults are kept
     */
    private void setupMemoryParms(AxisConfiguration axisCfg) {
        if (log.isDebugEnabled()) {
            log.debug("setupMemoryParms(AxisConfiguration) entry");
        }
        // determine what the setting for the memory optimization is
        if (axisCfg != null) {
            Parameter param = axisCfg.getParameter(Constants.Configuration.REDUCE_WSDL_MEMORY_CACHE);
            reduceWSDLMemoryCache =
                    param != null && ((String) param.getValue()).equalsIgnoreCase("true");

            param = axisCfg.getParameter(Constants.Configuration.REDUCE_WSDL_MEMORY_TYPE);
            if (param != null) {
                String value = (String) param.getValue();
                if (value != null) {
                    // Integer.parseInt instead of the deprecated new Integer(value);
                    // same NumberFormatException behavior for bad input
                    reduceWSDLMemoryType = Integer.parseInt(value);
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("reduceWSDLMemoryCache:" + reduceWSDLMemoryCache
                        + ", reduceWSDLMemoryType:" + reduceWSDLMemoryType);
            }
        } else {
            if (log.isDebugEnabled()) {
                log.debug("AxisConfiguration is null.  This is unexpected");
            }
        }
    }

    /**
     * Initialize the wsdl definition wrapper
     *
     * @param def  The WSDL4J definition
     * @param wURL The URL where the WSDL is obtained
     */
    private void prepare(Definition def, URL wURL) {
        if (reduceWSDLMemoryCache) {
            // if the type is specified, then use it
            // otherwise, default to the serialization technique
            if (reduceWSDLMemoryType == 2) {
                // a wrapper implementation that uses release & reload on the
                // underlying WSDL4J object
                // this would be desirable for those environments where
                // many of the WSDL definitions are not serializable
                wrapperImpl = new WSDLWrapperReloadImpl(def, wURL);
            } else {
                // a wrapper implementation that uses serialization to save the
                // underlying WSDL4J object
                wrapperImpl = new WSDLWrapperSaveImpl(def, wURL);
            }
        } else {
            // a wrapper implementation that is just a passthrough to the
            // underlying WSDL4J object
            wrapperImpl = new WSDLWrapperBasicImpl(def, wURL);
        }
        wrapperImpl.releaseResources();
    }

    //-------------------------------------------------------------------------
    // public WSDLDefinitionWrapper methods
    //-------------------------------------------------------------------------

    /**
     * Returns the WSDL4J Definition object that is being wrapped
     */
    public Definition getUnwrappedDefinition() {
        return wrapperImpl.getUnwrappedDefinition();
    }

    /** Returns the configured memory-reduction type (0 = default/serialization, 2 = reload). */
    public int getMemoryLimitType() {
        return this.reduceWSDLMemoryType;
    }

    //-------------------------------------------------------------------------
    // javax.wsdl.Definition interface methods
    // (all of these are straight delegations to wrapperImpl)
    //-------------------------------------------------------------------------

    public void setDocumentBaseURI(String d) {
        // Set the URI of the base document for the Definition.
        // This identifies the origin of the Definition.
        // Note that this is the URI of the base document, not the imports.
        wrapperImpl.setDocumentBaseURI(d);
    }

    public String getDocumentBaseURI() {
        // Get the URI of the base document for the Definition.
        // This identifies the origin of the Definition.
        // Note that this is the URI of the base document, not the imports.
        return wrapperImpl.getDocumentBaseURI();
    }

    public void setQName(QName n) { wrapperImpl.setQName(n); }

    public QName getQName() { return wrapperImpl.getQName(); }

    public void setTargetNamespace(String t) { wrapperImpl.setTargetNamespace(t); }

    public String getTargetNamespace() { return wrapperImpl.getTargetNamespace(); }

    public void addNamespace(String prefix, String namespaceURI) {
        wrapperImpl.addNamespace(prefix, namespaceURI);
    }

    public String removeNamespace(String prefix) { return wrapperImpl.removeNamespace(prefix); }

    public String getNamespace(String prefix) { return wrapperImpl.getNamespace(prefix); }

    public String getPrefix(String namespaceURI) { return wrapperImpl.getPrefix(namespaceURI); }

    public Map getNamespaces() { return wrapperImpl.getNamespaces(); }

    public List getNativeAttributeNames() { return wrapperImpl.getNativeAttributeNames(); }

    public void setTypes(Types types) { wrapperImpl.setTypes(types); }

    public Types getTypes() { return wrapperImpl.getTypes(); }

    public void addImport(Import importDef) { wrapperImpl.addImport(importDef); }

    public Import removeImport(Import importDef) { return wrapperImpl.removeImport(importDef); }

    public List getImports(String namespaceURI) { return wrapperImpl.getImports(namespaceURI); }

    public Map getImports() { return wrapperImpl.getImports(); }

    public void addMessage(Message message) { wrapperImpl.addMessage(message); }

    public Message getMessage(QName name) { return wrapperImpl.getMessage(name); }

    public Message removeMessage(QName name) { return wrapperImpl.removeMessage(name); }

    public Map getMessages() { return wrapperImpl.getMessages(); }

    public void addBinding(Binding binding) { wrapperImpl.addBinding(binding); }

    public Binding getBinding(QName name) { return wrapperImpl.getBinding(name); }

    public Binding removeBinding(QName name) { return wrapperImpl.removeBinding(name); }

    public Map getBindings() { return wrapperImpl.getBindings(); }

    public Map getAllBindings() { return wrapperImpl.getAllBindings(); }

    public void addPortType(PortType portType) { wrapperImpl.addPortType(portType); }

    public PortType getPortType(QName name) { return wrapperImpl.getPortType(name); }

    public PortType removePortType(QName name) { return wrapperImpl.removePortType(name); }

    public Map getPortTypes() { return wrapperImpl.getPortTypes(); }

    public Map getAllPortTypes() { return wrapperImpl.getAllPortTypes(); }

    public void addService(Service service) { wrapperImpl.addService(service); }

    public Service getService(QName name) { return wrapperImpl.getService(name); }

    public Service removeService(QName name) { return wrapperImpl.removeService(name); }

    public Map getServices() { return wrapperImpl.getServices(); }

    public Map getAllServices() { return wrapperImpl.getAllServices(); }

    public void setDocumentationElement(org.w3c.dom.Element docEl) {
        wrapperImpl.setDocumentationElement(docEl);
    }

    public org.w3c.dom.Element getDocumentationElement() {
        return wrapperImpl.getDocumentationElement();
    }

    public void addExtensibilityElement(ExtensibilityElement extElement) {
        wrapperImpl.addExtensibilityElement(extElement);
    }

    public List getExtensibilityElements() { return wrapperImpl.getExtensibilityElements(); }

    public Binding createBinding() { return wrapperImpl.createBinding(); }

    public BindingFault createBindingFault() { return wrapperImpl.createBindingFault(); }

    public BindingInput createBindingInput() { return wrapperImpl.createBindingInput(); }

    public BindingOperation createBindingOperation() { return wrapperImpl.createBindingOperation(); }

    public BindingOutput createBindingOutput() { return wrapperImpl.createBindingOutput(); }

    public Fault createFault() { return wrapperImpl.createFault(); }

    public Import createImport() { return wrapperImpl.createImport(); }

    public Input createInput() { return wrapperImpl.createInput(); }

    public Message createMessage() { return wrapperImpl.createMessage(); }

    public Operation createOperation() { return wrapperImpl.createOperation(); }

    public Output createOutput() { return wrapperImpl.createOutput(); }

    public Part createPart() { return wrapperImpl.createPart(); }

    public Port createPort() { return wrapperImpl.createPort(); }

    public PortType createPortType() { return wrapperImpl.createPortType(); }

    public Service createService() { return wrapperImpl.createService(); }

    public Types createTypes() { return wrapperImpl.createTypes(); }

    public void setExtensionRegistry(ExtensionRegistry extReg) {
        wrapperImpl.setExtensionRegistry(extReg);
    }

    public ExtensionRegistry getExtensionRegistry() {
        return wrapperImpl.getExtensionRegistry();
    }

    public String toString() { return wrapperImpl.toString(); }

    //-------------------------------------------------------------------------
    // other AbstractWSDLElement methods
    //-------------------------------------------------------------------------

    public ExtensibilityElement removeExtensibilityElement(ExtensibilityElement extElement) {
        return wrapperImpl.removeExtensibilityElement(extElement);
    }

    public java.lang.Object getExtensionAttribute(QName name) {
        return wrapperImpl.getExtensionAttribute(name);
    }

    public Map getExtensionAttributes() {
        return wrapperImpl.getExtensionAttributes();
    }

    public void setExtensionAttribute(QName name, java.lang.Object value) {
        wrapperImpl.setExtensionAttribute(name, value);
    }
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.charfilter.HTMLStripCharFilterFactory;
import org.apache.lucene.analysis.en.PorterStemFilterFactory;
import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory;
import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;

import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import static java.util.Collections.emptyList;
import static java.util.stream.Collectors.toList;

/**
 * Verifies that every Lucene analysis factory moved into the analysis-common
 * module is mapped to its Elasticsearch factory class here.
 */
public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
    public CommonAnalysisFactoryTests() {
        super(new CommonAnalysisPlugin());
    }

    @Override
    protected Map<String, Class<?>> getTokenizers() {
        Map<String, Class<?>> tokenizers = new TreeMap<>(super.getTokenizers());
        tokenizers.put("simplepattern", SimplePatternTokenizerFactory.class);
        tokenizers.put("simplepatternsplit", SimplePatternSplitTokenizerFactory.class);
        tokenizers.put("thai", ThaiTokenizerFactory.class);
        tokenizers.put("ngram", NGramTokenizerFactory.class);
        tokenizers.put("edgengram", EdgeNGramTokenizerFactory.class);
        tokenizers.put("classic", ClassicTokenizerFactory.class);
        tokenizers.put("letter", LetterTokenizerFactory.class);
        // tokenizers.put("lowercase", XLowerCaseTokenizerFactory.class);
        tokenizers.put("pathhierarchy", PathHierarchyTokenizerFactory.class);
        tokenizers.put("pattern", PatternTokenizerFactory.class);
        tokenizers.put("uax29urlemail", UAX29URLEmailTokenizerFactory.class);
        tokenizers.put("whitespace", WhitespaceTokenizerFactory.class);
        tokenizers.put("keyword", KeywordTokenizerFactory.class);
        return tokenizers;
    }

    @Override
    protected Map<String, Class<?>> getTokenFilters() {
        Map<String, Class<?>> filters = new TreeMap<>(super.getTokenFilters());
        filters.put("asciifolding", ASCIIFoldingTokenFilterFactory.class);
        filters.put("keywordmarker", KeywordMarkerTokenFilterFactory.class);
        filters.put("porterstem", PorterStemTokenFilterFactory.class);
        filters.put("snowballporter", SnowballTokenFilterFactory.class);
        filters.put("trim", TrimTokenFilterFactory.class);
        filters.put("worddelimiter", WordDelimiterTokenFilterFactory.class);
        filters.put("worddelimitergraph", WordDelimiterGraphTokenFilterFactory.class);
        filters.put("flattengraph", FlattenGraphTokenFilterFactory.class);
        filters.put("length", LengthTokenFilterFactory.class);
        filters.put("greeklowercase", LowerCaseTokenFilterFactory.class);
        filters.put("irishlowercase", LowerCaseTokenFilterFactory.class);
        filters.put("lowercase", LowerCaseTokenFilterFactory.class);
        filters.put("turkishlowercase", LowerCaseTokenFilterFactory.class);
        filters.put("uppercase", UpperCaseTokenFilterFactory.class);
        filters.put("ngram", NGramTokenFilterFactory.class);
        filters.put("edgengram", EdgeNGramTokenFilterFactory.class);
        filters.put("bengalistem", StemmerTokenFilterFactory.class);
        filters.put("bulgarianstem", StemmerTokenFilterFactory.class);
        filters.put("englishminimalstem", StemmerTokenFilterFactory.class);
        filters.put("englishpossessive", StemmerTokenFilterFactory.class);
        filters.put("finnishlightstem", StemmerTokenFilterFactory.class);
        filters.put("frenchlightstem", StemmerTokenFilterFactory.class);
        filters.put("frenchminimalstem", StemmerTokenFilterFactory.class);
        filters.put("galicianminimalstem", StemmerTokenFilterFactory.class);
        filters.put("galicianstem", StemmerTokenFilterFactory.class);
        filters.put("germanlightstem", StemmerTokenFilterFactory.class);
        filters.put("germanminimalstem", StemmerTokenFilterFactory.class);
        filters.put("greekstem", StemmerTokenFilterFactory.class);
        filters.put("hindistem", StemmerTokenFilterFactory.class);
        filters.put("hungarianlightstem", StemmerTokenFilterFactory.class);
        filters.put("indonesianstem", StemmerTokenFilterFactory.class);
        filters.put("italianlightstem", StemmerTokenFilterFactory.class);
        filters.put("latvianstem", StemmerTokenFilterFactory.class);
        filters.put("norwegianlightstem", StemmerTokenFilterFactory.class);
        filters.put("norwegianminimalstem", StemmerTokenFilterFactory.class);
        filters.put("portuguesestem", StemmerTokenFilterFactory.class);
        filters.put("portugueselightstem", StemmerTokenFilterFactory.class);
        filters.put("portugueseminimalstem", StemmerTokenFilterFactory.class);
        filters.put("russianlightstem", StemmerTokenFilterFactory.class);
        filters.put("soranistem", StemmerTokenFilterFactory.class);
        filters.put("spanishlightstem", StemmerTokenFilterFactory.class);
        filters.put("swedishlightstem", StemmerTokenFilterFactory.class);
        filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class);
        filters.put("kstem", KStemTokenFilterFactory.class);
        filters.put("synonym", SynonymTokenFilterFactory.class);
        filters.put("synonymgraph", SynonymGraphTokenFilterFactory.class);
        filters.put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class);
        filters.put("hyphenationcompoundword", HyphenationCompoundWordTokenFilterFactory.class);
        filters.put("reversestring", ReverseTokenFilterFactory.class);
        filters.put("elision", ElisionTokenFilterFactory.class);
        filters.put("truncate", TruncateTokenFilterFactory.class);
        filters.put("limittokencount", LimitTokenCountFilterFactory.class);
        filters.put("commongrams", CommonGramsTokenFilterFactory.class);
        filters.put("commongramsquery", CommonGramsTokenFilterFactory.class);
        filters.put("patternreplace", PatternReplaceTokenFilterFactory.class);
        filters.put("patterncapturegroup", PatternCaptureGroupTokenFilterFactory.class);
        filters.put("arabicnormalization", ArabicNormalizationFilterFactory.class);
        filters.put("bengalinormalization", BengaliNormalizationFilterFactory.class);
        filters.put("germannormalization", GermanNormalizationFilterFactory.class);
        filters.put("hindinormalization", HindiNormalizationFilterFactory.class);
        filters.put("indicnormalization", IndicNormalizationFilterFactory.class);
        filters.put("persiannormalization", PersianNormalizationFilterFactory.class);
        filters.put("scandinaviannormalization", ScandinavianNormalizationFilterFactory.class);
        filters.put("serbiannormalization", SerbianNormalizationFilterFactory.class);
        filters.put("soraninormalization", SoraniNormalizationFilterFactory.class);
        filters.put("cjkwidth", CJKWidthFilterFactory.class);
        filters.put("cjkbigram", CJKBigramFilterFactory.class);
        filters.put("delimitedpayload", DelimitedPayloadTokenFilterFactory.class);
        filters.put("keepword", KeepWordFilterFactory.class);
        filters.put("type", KeepTypesFilterFactory.class);
        filters.put("classic", ClassicFilterFactory.class);
        filters.put("apostrophe", ApostropheFilterFactory.class);
        filters.put("decimaldigit", DecimalDigitFilterFactory.class);
        filters.put("fingerprint", FingerprintTokenFilterFactory.class);
        filters.put("minhash", MinHashTokenFilterFactory.class);
        filters.put("scandinavianfolding", ScandinavianFoldingFilterFactory.class);
        filters.put("arabicstem", ArabicStemTokenFilterFactory.class);
        filters.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
        filters.put("czechstem", CzechStemTokenFilterFactory.class);
        filters.put("germanstem", GermanStemTokenFilterFactory.class);
        // this filter is not exposed and should only be used internally
        filters.put("fixedshingle", Void.class);
        return filters;
    }

    @Override
    protected Map<String, Class<?>> getCharFilters() {
        Map<String, Class<?>> filters = new TreeMap<>(super.getCharFilters());
        filters.put("htmlstrip", HtmlStripCharFilterFactory.class);
        filters.put("mapping", MappingCharFilterFactory.class);
        filters.put("patternreplace", PatternReplaceCharFilterFactory.class);

        // TODO: these charfilters are not yet exposed: useful?
        // handling of zwnj for persian
        filters.put("persian", Void.class);
        return filters;
    }

    @Override
    public Map<String, Class<?>> getPreConfiguredCharFilters() {
        Map<String, Class<?>> filters = new TreeMap<>(super.getPreConfiguredCharFilters());
        filters.put("html_strip", HTMLStripCharFilterFactory.class);
        filters.put("htmlStrip", HTMLStripCharFilterFactory.class);
        return filters;
    }

    @Override
    protected Map<String, Class<?>> getPreConfiguredTokenFilters() {
        Map<String, Class<?>> filters = new TreeMap<>(super.getPreConfiguredTokenFilters());
        filters.put("apostrophe", null);
        filters.put("arabic_normalization", null);
        filters.put("arabic_stem", null);
        filters.put("asciifolding", null);
        filters.put("bengali_normalization", null);
        filters.put("brazilian_stem", null);
        filters.put("cjk_bigram", null);
        filters.put("cjk_width", null);
        filters.put("classic", null);
        filters.put("common_grams", null);
        filters.put("czech_stem", null);
        filters.put("decimal_digit", null);
        filters.put("delimited_payload_filter", org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory.class);
        filters.put("delimited_payload", org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory.class);
        filters.put("dutch_stem", SnowballPorterFilterFactory.class);
        filters.put("edge_ngram", null);
        filters.put("elision", null);
        filters.put("french_stem", SnowballPorterFilterFactory.class);
        filters.put("german_stem", null);
        filters.put("german_normalization", null);
        filters.put("hindi_normalization", null);
        filters.put("indic_normalization", null);
        filters.put("keyword_repeat", null);
        filters.put("kstem", null);
        filters.put("length", null);
        filters.put("limit", LimitTokenCountFilterFactory.class);
        filters.put("ngram", null);
        filters.put("persian_normalization", null);
        filters.put("porter_stem", null);
        filters.put("reverse", ReverseStringFilterFactory.class);
        filters.put("russian_stem", SnowballPorterFilterFactory.class);
        filters.put("scandinavian_normalization", null);
        filters.put("scandinavian_folding", null);
        filters.put("shingle", null);
        filters.put("snowball", SnowballPorterFilterFactory.class);
        filters.put("sorani_normalization", null);
        filters.put("stemmer", PorterStemFilterFactory.class);
        filters.put("stop", null);
        filters.put("trim", null);
        filters.put("truncate", null);
        filters.put("type_as_payload", null);
        filters.put("unique", Void.class);
        filters.put("uppercase", null);
        filters.put("word_delimiter", null);
        filters.put("word_delimiter_graph", null);
        return filters;
    }

    @Override
    protected Map<String, Class<?>> getPreConfiguredTokenizers() {
        Map<String, Class<?>> tokenizers = new TreeMap<>(super.getPreConfiguredTokenizers());
        tokenizers.put("keyword", null);
        tokenizers.put("lowercase", Void.class);
        tokenizers.put("classic", null);
        tokenizers.put("uax_url_email", org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class);
        tokenizers.put("path_hierarchy", null);
        tokenizers.put("letter", null);
        tokenizers.put("whitespace", null);
        tokenizers.put("ngram", null);
        tokenizers.put("edge_ngram", null);
        tokenizers.put("pattern", null);
        tokenizers.put("thai", null);

        // TODO drop aliases once they are moved to module
        tokenizers.put("nGram", tokenizers.get("ngram"));
        tokenizers.put("edgeNGram", tokenizers.get("edge_ngram"));
        tokenizers.put("PathHierarchy", tokenizers.get("path_hierarchy"));

        return tokenizers;
    }

    /**
     * Fails if a tokenizer is marked in the superclass with {@link MovedToAnalysisCommon} but
     * hasn't been marked in this class with its proper factory.
     */
    public void testAllTokenizersMarked() {
        // fixed copy-paste label: this checks tokenizers, not char filters
        markedTestCase("tokenizer", getTokenizers());
    }

    /**
     * Fails if a char filter is marked in the superclass with {@link MovedToAnalysisCommon} but
     * hasn't been marked in this class with its proper factory.
     */
    public void testAllCharFiltersMarked() {
        markedTestCase("char filter", getCharFilters());
    }

    /**
     * Fails if a token filter is marked in the superclass with {@link MovedToAnalysisCommon} but
     * hasn't been marked in this class with its proper factory.
     */
    public void testAllTokenFiltersMarked() {
        markedTestCase("token filter", getTokenFilters());
    }

    /**
     * Asserts that no entry in {@code map} is still mapped to the
     * {@link MovedToAnalysisCommon} placeholder.
     *
     * @param name human-readable kind of component, used in the failure message
     * @param map  component name to factory class mapping to check
     */
    private void markedTestCase(String name, Map<String, Class<?>> map) {
        List<String> unmarked = map.entrySet().stream()
                .filter(e -> e.getValue() == MovedToAnalysisCommon.class)
                .map(Map.Entry::getKey)
                .sorted()
                .collect(toList());
        assertEquals(name + " marked in AnalysisFactoryTestCase as moved to analysis-common "
                + "but not mapped here", emptyList(), unmarked);
    }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.ui.spoon.job;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;

import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CTabItem;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeColumn;
import org.eclipse.swt.widgets.TreeItem;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.gui.JobTracker;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobEntryResult;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.widget.TreeMemory;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.spoon.delegates.SpoonDelegate;

/**
 * Spoon delegate that renders the execution grid for a running job: a tree table with one row
 * per job entry showing its comment, result, reason, filename, entry number and log date.
 * The tree is refreshed periodically from the job's {@link JobTracker} while the job runs.
 */
public class JobGridDelegate extends SpoonDelegate {

  private static Class<?> PKG = JobGraph.class; // for i18n purposes, needed by Translator2!!

  /** Period in milliseconds between refreshes of the execution tree while the job runs. */
  public static final long REFRESH_TIME = 100L;
  public static final long UPDATE_TIME_VIEW = 1000L;

  /** Key under which the tree's expand/collapse state is remembered in {@link TreeMemory}. */
  private static final String STRING_CHEF_LOG_TREE_NAME = "Job Log Tree";

  private JobGraph jobGraph;
  private CTabItem jobGridTab;
  private Tree wTree;

  /** Tracker supplying the execution results rendered in the tree; set via setJobTracker(). */
  public JobTracker jobTracker;

  /** Item count at the last refresh; the tree is only rebuilt when this changes. */
  public int previousNrItems;

  /** Running row counter used to alternate row background colors. */
  private int nrRow = 0;

  /**
   * @param spoon      the Spoon instance
   * @param transGraph the job graph whose execution results are shown in this grid
   */
  public JobGridDelegate( Spoon spoon, JobGraph transGraph ) {
    super( spoon );
    this.jobGraph = transGraph;
  }

  /**
   * Add a grid with the execution metrics per step in a table view
   */
  public void addJobGrid() {
    // First, see if we need to add the extra view...
    //
    if ( jobGraph.extraViewComposite == null || jobGraph.extraViewComposite.isDisposed() ) {
      jobGraph.addExtraView();
    } else {
      if ( jobGridTab != null && !jobGridTab.isDisposed() ) {
        // just set this one active and get out...
        //
        jobGraph.extraViewTabFolder.setSelection( jobGridTab );
        return;
      }
    }

    jobGridTab = new CTabItem( jobGraph.extraViewTabFolder, SWT.NONE );
    jobGridTab.setImage( GUIResource.getInstance().getImageShowGrid() );
    jobGridTab.setText( BaseMessages.getString( PKG, "Spoon.TransGraph.GridTab.Name" ) );

    addControls();

    jobGridTab.setControl( wTree );

    jobGraph.extraViewTabFolder.setSelection( jobGridTab );
  }

  /**
   * Add the controls to the tab: build the tree table with its seven columns and start the
   * periodic refresh timer. The timer is cancelled when the job log tab is disposed.
   */
  private void addControls() {
    // Create the tree table...
    //
    wTree = new Tree( jobGraph.extraViewTabFolder, SWT.V_SCROLL | SWT.H_SCROLL );
    wTree.setHeaderVisible( true );
    TreeMemory.addTreeListener( wTree, STRING_CHEF_LOG_TREE_NAME );

    TreeColumn column1 = new TreeColumn( wTree, SWT.LEFT );
    column1.setText( BaseMessages.getString( PKG, "JobLog.Column.JobJobEntry" ) );
    column1.setWidth( 200 );

    TreeColumn column2 = new TreeColumn( wTree, SWT.LEFT );
    column2.setText( BaseMessages.getString( PKG, "JobLog.Column.Comment" ) );
    column2.setWidth( 200 );

    TreeColumn column3 = new TreeColumn( wTree, SWT.LEFT );
    column3.setText( BaseMessages.getString( PKG, "JobLog.Column.Result" ) );
    column3.setWidth( 100 );

    TreeColumn column4 = new TreeColumn( wTree, SWT.LEFT );
    column4.setText( BaseMessages.getString( PKG, "JobLog.Column.Reason" ) );
    column4.setWidth( 200 );

    TreeColumn column5 = new TreeColumn( wTree, SWT.LEFT );
    column5.setText( BaseMessages.getString( PKG, "JobLog.Column.Filename" ) );
    column5.setWidth( 200 );

    TreeColumn column6 = new TreeColumn( wTree, SWT.RIGHT );
    column6.setText( BaseMessages.getString( PKG, "JobLog.Column.Nr" ) );
    column6.setWidth( 50 );

    TreeColumn column7 = new TreeColumn( wTree, SWT.RIGHT );
    column7.setText( BaseMessages.getString( PKG, "JobLog.Column.LogDate" ) );
    column7.setWidth( 120 );

    FormData fdTree = new FormData();
    fdTree.left = new FormAttachment( 0, 0 );
    fdTree.top = new FormAttachment( 0, 0 );
    fdTree.right = new FormAttachment( 100, 0 );
    fdTree.bottom = new FormAttachment( 100, 0 );
    wTree.setLayoutData( fdTree );

    final Timer tim = new Timer( "JobGrid: " + jobGraph.getMeta().getName() );

    TimerTask timtask = new TimerTask() {
      public void run() {
        Display display = jobGraph.getDisplay();
        if ( display != null && !display.isDisposed() ) {
          display.asyncExec( new Runnable() {
            public void run() {
              // Check if the widgets are not disposed.
              // This happens is the rest of the window is not yet disposed.
              // We ARE running in a different thread after all.
              //
              // TODO: add a "auto refresh" check box somewhere
              if ( !wTree.isDisposed() ) {
                refreshTreeTable();
              }
            }
          } );
        }
      }
    };

    // FIX: the period used to be hard-coded to 10L (every 10ms, while the old comment claimed
    // "every 2 seconds"), which floods the SWT event queue. Use the declared REFRESH_TIME
    // constant (100ms) instead, matching TransGridDelegate.
    tim.schedule( timtask, 10L, REFRESH_TIME ); // refresh every REFRESH_TIME ms

    // Stop the timer when the job log tab goes away.
    jobGraph.jobLogDelegate.getJobLogTab().addDisposeListener( new DisposeListener() {
      public void widgetDisposed( DisposeEvent disposeEvent ) {
        tim.cancel();
      }
    } );
  }

  /**
   * Refresh the data in the tree-table... Use the data from the JobTracker in the job.
   * The tree is fully rebuilt, but only when the tracker's item count changed since the
   * previous refresh, to limit flicker.
   */
  private void refreshTreeTable() {
    if ( jobTracker != null ) {
      int nrItems = jobTracker.getTotalNumberOfItems();

      if ( nrItems != previousNrItems ) {
        // Allow some flickering for now ;-)
        wTree.removeAll();

        // Re-populate this...
        TreeItem treeItem = new TreeItem( wTree, SWT.NONE );
        String jobName = jobTracker.getJobName();

        if ( Const.isEmpty( jobName ) ) {
          if ( !Const.isEmpty( jobTracker.getJobFilename() ) ) {
            jobName = jobTracker.getJobFilename();
          } else {
            jobName = BaseMessages.getString( PKG, "JobLog.Tree.StringToDisplayWhenJobHasNoName" );
          }
        }
        treeItem.setText( 0, jobName );
        TreeMemory.getInstance().storeExpanded( STRING_CHEF_LOG_TREE_NAME, new String[] { jobName }, true );

        for ( int i = 0; i < jobTracker.nrJobTrackers(); i++ ) {
          addTrackerToTree( jobTracker.getJobTracker( i ), treeItem );
        }
        previousNrItems = nrItems;

        TreeMemory.setExpandedFromMemory( wTree, STRING_CHEF_LOG_TREE_NAME );
      }
    }
  }

  /**
   * Recursively add one tracker (a sub-job or a single job entry result) as a child of
   * {@code parentItem}. Even rows get an alternating background color; entry rows are
   * colored green/red according to their result.
   *
   * @param tracker    the tracker to render (parameter renamed from "jobTracker" to avoid
   *                   shadowing the field of the same name)
   * @param parentItem the tree item to attach the new row to
   */
  private void addTrackerToTree( JobTracker tracker, TreeItem parentItem ) {
    try {
      if ( tracker != null ) {
        TreeItem treeItem = new TreeItem( parentItem, SWT.NONE );
        if ( nrRow % 2 != 0 ) {
          treeItem.setBackground( GUIResource.getInstance().getColorBlueCustomGrid() );
        }
        nrRow++;
        if ( tracker.nrJobTrackers() > 0 ) {
          // This is a sub-job: display the name at the top of the list...
          treeItem.setText( 0, BaseMessages.getString( PKG, "JobLog.Tree.JobPrefix" ) + tracker.getJobName() );

          // then populate the sub-job entries ...
          for ( int i = 0; i < tracker.nrJobTrackers(); i++ ) {
            addTrackerToTree( tracker.getJobTracker( i ), treeItem );
          }
        } else {
          JobEntryResult result = tracker.getJobEntryResult();
          if ( result != null ) {
            String jobEntryName = result.getJobEntryName();
            if ( !Const.isEmpty( jobEntryName ) ) {
              treeItem.setText( 0, jobEntryName );
              treeItem.setText( 4, Const.NVL( result.getJobEntryFilename(), "" ) );
            } else {
              treeItem.setText( 0, BaseMessages.getString( PKG, "JobLog.Tree.JobPrefix2" ) + tracker.getJobName() );
            }
            String comment = result.getComment();
            if ( comment != null ) {
              treeItem.setText( 1, comment );
            }
            Result res = result.getResult();
            if ( res != null ) {
              treeItem.setText( 2, res.getResult()
                ? BaseMessages.getString( PKG, "JobLog.Tree.Success" )
                : BaseMessages.getString( PKG, "JobLog.Tree.Failure" ) );
              treeItem.setText( 5, Long.toString( res.getEntryNr() ) );
              // Color the row green upon success, red otherwise.
              if ( res.getResult() ) {
                treeItem.setForeground( GUIResource.getInstance().getColorSuccessGreen() );
              } else {
                treeItem.setForeground( GUIResource.getInstance().getColorRed() );
              }
            }
            String reason = result.getReason();
            if ( reason != null ) {
              treeItem.setText( 3, reason );
            }
            Date logDate = result.getLogDate();
            if ( logDate != null ) {
              treeItem.setText( 6, new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss" ).format( logDate ) );
            }
          }
        }
        treeItem.setExpanded( true );
      }
    } catch ( Exception e ) {
      log.logError( Const.getStackTracker( e ) );
    }
  }

  public CTabItem getJobGridTab() {
    return jobGridTab;
  }

  public void setJobTracker( JobTracker jobTracker ) {
    this.jobTracker = jobTracker;
  }
}
package edu.uw.zookeeper.safari.storage.snapshot;

import static org.junit.Assert.*;

import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import com.google.common.base.Optional;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.name.Names;

import edu.uw.zookeeper.client.ConnectionClientExecutorService;
import edu.uw.zookeeper.common.Pair;
import edu.uw.zookeeper.data.AbsoluteZNodePath;
import edu.uw.zookeeper.data.Acls;
import edu.uw.zookeeper.data.LockableZNodeCache;
import edu.uw.zookeeper.data.Materializer;
import edu.uw.zookeeper.data.Name;
import edu.uw.zookeeper.data.Operations;
import edu.uw.zookeeper.data.Sequential;
import edu.uw.zookeeper.data.Serializers;
import edu.uw.zookeeper.data.SimpleLabelTrie;
import edu.uw.zookeeper.data.SimpleNameTrie;
import edu.uw.zookeeper.data.ValueNode;
import edu.uw.zookeeper.data.ZNode;
import edu.uw.zookeeper.data.ZNodeLabel;
import edu.uw.zookeeper.data.ZNodeName;
import edu.uw.zookeeper.data.ZNodePath;
import edu.uw.zookeeper.data.ZNodeSchema;
import edu.uw.zookeeper.data.NameTrie.Pointer;
import edu.uw.zookeeper.data.Serializers.ByteCodec;
import edu.uw.zookeeper.protocol.Message;
import edu.uw.zookeeper.safari.AbstractMainTest;
import edu.uw.zookeeper.safari.Component;
import edu.uw.zookeeper.safari.Modules;
import edu.uw.zookeeper.safari.peer.protocol.JacksonModule;
import edu.uw.zookeeper.safari.schema.PrefixCreator;
import edu.uw.zookeeper.safari.storage.schema.StorageSchema;
import edu.uw.zookeeper.safari.storage.schema.StorageZNode;

/**
 * Integration test for {@code SequentialEphemeralSnapshotTrie}: builds an in-memory trie of
 * ephemeral/sequential znodes, writes it into a snapshot-shaped ZooKeeper schema through a
 * backend materializer, then verifies the trie and leaf set computed by
 * {@code SequentialEphemeralSnapshotTrie.create} against the original data.
 */
@RunWith(JUnit4.class)
public class SequentialEphemeralSnapshotTrieTest extends AbstractMainTest {

    /**
     * Root of a minimal test schema mirroring the snapshot layout:
     * /ephemerals/sessions/&lt;session&gt;/values/&lt;escaped-path&gt;.
     */
    @ZNode(acl=Acls.Definition.ANYONE_ALL)
    public static final class SequentialEphemeralSnapshotTrieTestSchema extends StorageZNode<Void> {

        public static final ZNodePath PATH = ZNodePath.root();

        public SequentialEphemeralSnapshotTrieTestSchema(
                ValueNode<ZNodeSchema> schema,
                ByteCodec<Object> codec) {
            super(schema, codec, SimpleNameTrie.<StorageZNode<?>>rootPointer());
        }

        // Container for all snapshotted ephemerals; reuses the label from the real storage schema.
        @ZNode
        public static class Ephemerals extends StorageZNode<Void> {

            @Name
            public static final ZNodeLabel LABEL = StorageSchema.Safari.Volumes.Volume.Log.Version.Snapshot.Ephemerals.LABEL;

            public static final AbsoluteZNodePath PATH = (AbsoluteZNodePath) SequentialEphemeralSnapshotTrieTestSchema.PATH.join(LABEL);

            public Ephemerals(ValueNode<ZNodeSchema> schema, ByteCodec<Object> codec, Pointer<StorageZNode<?>> parent) {
                super(schema, codec, parent);
            }

            // One child per owning session.
            @ZNode
            public static class Sessions extends StorageZNode.SessionsZNode {

                public static final AbsoluteZNodePath PATH = Ephemerals.PATH.join(LABEL);

                public Sessions(ValueNode<ZNodeSchema> schema, ByteCodec<Object> codec, Pointer<StorageZNode<?>> parent) {
                    super(schema, codec, parent);
                }

                @ZNode
                public static class Session extends StorageZNode.SessionZNode<Void> {

                    // Path of the znode for a given session id (labelOf inherited from SessionZNode).
                    public static AbsoluteZNodePath pathOf(Long session) {
                        return Sessions.PATH.join(labelOf(session));
                    }

                    public Session(ValueNode<ZNodeSchema> schema, ByteCodec<Object> codec, Pointer<StorageZNode<?>> parent) {
                        super(schema, codec, parent);
                    }

                    // Holds one Ephemeral child per ephemeral znode owned by the session.
                    @ZNode
                    public static class Values extends StorageZNode.ValuesZNode {

                        public static AbsoluteZNodePath pathOf(Long session) {
                            return Session.pathOf(session).join(LABEL);
                        }

                        public Values(ValueNode<ZNodeSchema> schema, ByteCodec<Object> codec, Pointer<StorageZNode<?>> parent) {
                            super(schema, codec, parent);
                        }

                        // An ephemeral's original path, escaped into a single label.
                        @ZNode
                        public static class Ephemeral extends StorageZNode.EscapedNamedZNode<Void> {

                            public Ephemeral(ValueNode<ZNodeSchema> schema, ByteCodec<Object> codec, Pointer<StorageZNode<?>> parent) {
                                super(schema, codec, parent);
                            }

                            public Ephemeral(
                                    String name,
                                    ValueNode<ZNodeSchema> schema,
                                    ByteCodec<Object> codec,
                                    Pointer<StorageZNode<?>> parent) {
                                super(name, schema, codec, parent);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Create a small data set with a mixture of sequentials and ephemerals.
     * Assume all leafs are ephemeral: the value stored at a node isPresent() if it is
     * ephemeral, and the value is the owner session.
     */
    public static SimpleNameTrie<ValueNode<Optional<Long>>> createSmallTestData() {
        final ImmutableMap<String,Long> owners = ImmutableMap.of("a", Long.valueOf(0L), "b", Long.valueOf(1L));
        final ImmutableList<Pair<String,Long>> values = ImmutableList.of(
                Pair.create("b", owners.get("b")),
                Pair.create("a", (Long) null),
                Pair.create("a/a", owners.get("a")),
                Pair.create("a/" + Sequential.fromInt("b", 0).toString(), owners.get("b")),
                Pair.create("a/" + Sequential.fromInt("a", 1).toString(), owners.get("a")),
                Pair.create(Sequential.fromInt("c", 0).toString(), (Long) null),
                Pair.create(Sequential.fromInt("c", 0).toString() + "/a", owners.get("a")),
                Pair.create(Sequential.fromInt("c", 0).toString() + "/" + Sequential.fromInt("b", 0).toString(), owners.get("b")),
                Pair.create(Sequential.fromInt("b", 1).toString(), owners.get("b")),
                Pair.create(Sequential.fromInt("a", 2).toString(), owners.get("a")));
        return createTestData(values);
    }

    /**
     * Build a trie from (relative path, owner session) pairs. A null owner marks an interior
     * (non-ephemeral) node. Sanity-checks that every leaf has an owner, every interior node
     * does not, and that the trie size is the input size plus the root.
     *
     * NOTE(review): parents are assumed to appear before their children in {@code values}
     * (longestPrefix is used to find the parent) -- confirm for new data sets.
     */
    public static SimpleNameTrie<ValueNode<Optional<Long>>> createTestData(
            ImmutableList<Pair<String,Long>> values) {
        final SimpleNameTrie<ValueNode<Optional<Long>>> data = SimpleNameTrie.forRoot(ValueNode.root(Optional.<Long>absent()));
        for (Pair<String,Long> v: values) {
            ZNodePath path = ZNodePath.root().join(ZNodeName.fromString(v.first()));
            ValueNode<Optional<Long>> parent = data.longestPrefix(path);
            ValueNode<Optional<Long>> child = ValueNode.child(Optional.fromNullable(v.second()), path.suffix(parent.path()), parent);
            data.put(child.path(), child);
        }
        // Invariant: leaves are ephemeral (owner present), interior nodes are not.
        for (ValueNode<Optional<Long>> node: data) {
            boolean isPresent = node.get().isPresent();
            if (node.isEmpty()) {
                assertTrue(isPresent);
            } else {
                assertFalse(isPresent);
            }
        }
        assertEquals(values.size()+1, data.size());
        return data;
    }

    /**
     * Write {@code data} into the snapshot schema: session znodes are created through the
     * frontend client, then each session's values container and per-ephemeral children are
     * created through the backend client.
     */
    public static void snapshot(final SimpleNameTrie<ValueNode<Optional<Long>>> data,
            final Materializer<StorageZNode<?>,Message.ServerResponse<?>> backend,
            final Materializer<StorageZNode<?>,Message.ServerResponse<?>> frontend) throws Exception {
        // first create sessions in frontend client
        PrefixCreator.call(frontend).get();
        Set<Long> sessions = Sets.newHashSet();
        for (ValueNode<Optional<Long>> node: data) {
            if (node.get().isPresent()) {
                Long session = node.get().get();
                if (sessions.add(session)) {
                    Operations.unlessProtocolError(frontend.create(SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.pathOf(session)).call().get());
                }
            }
        }
        // second, snapshot values in backend client
        for (Long session: sessions) {
            Operations.unlessProtocolError(backend.create(SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.Values.pathOf(session)).call().get());
        }
        // one escaped-path child per ephemeral, under its owner session's values node
        for (ValueNode<Optional<Long>> node: data) {
            if (node.get().isPresent()) {
                Long session = node.get().get();
                ZNodeLabel label = SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.Values.Ephemeral.labelOf(node.path().suffix(ZNodePath.root()));
                Operations.unlessProtocolError(backend.create(SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.Values.pathOf(session).join(label)).call().get());
            }
        }
    }

    /**
     * Verify the computed trie/leaf-set pair: the reported leaf set must match both the leaves
     * of the computed trie and the sequential leaves of the original data, and each leaf's
     * stored value must resolve (via the cache) back to its original path.
     */
    public static void validate(final SimpleNameTrie<ValueNode<Optional<Long>>> data,
            final LockableZNodeCache<StorageZNode<?>,?,?> cache,
            final Pair<SimpleLabelTrie<SequentialNode<AbsoluteZNodePath>>,? extends Set<AbsoluteZNodePath>> result) {
        // check that computed set of leaves equals the actual trie leaves
        ImmutableSet.Builder<AbsoluteZNodePath> leaves = ImmutableSet.builder();
        for (SequentialNode<AbsoluteZNodePath> node: result.first()) {
            if (node.isEmpty() && !node.path().isRoot()) {
                leaves.add((AbsoluteZNodePath) node.path());
            }
        }
        assertEquals(leaves.build(), result.second());
        // check that the leaves are equal to the sequential data leaves
        leaves = ImmutableSet.builder();
        for (ValueNode<Optional<Long>> node: data) {
            if (node.isEmpty() && !node.path().isRoot() && Sequential.maybeFromString(node.path().label().toString()).isPresent()) {
                leaves.add((AbsoluteZNodePath) node.path());
            }
        }
        assertEquals(leaves.build(), result.second());
        // check that the value of leaves points to the correct snapshot path
        cache.lock().readLock().lock();
        try {
            for (AbsoluteZNodePath path: result.second()) {
                SequentialNode<AbsoluteZNodePath> leaf = result.first().get(path);
                SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.Values.Ephemeral ephemeral = (SequentialEphemeralSnapshotTrieTestSchema.Ephemerals.Sessions.Session.Values.Ephemeral) cache.cache().get(leaf.getValue());
                assertEquals(leaf.path(), ZNodePath.root().join(ZNodeName.fromString(ephemeral.name())));
            }
        } finally {
            cache.lock().readLock().unlock();
        }
    }

    /**
     * End-to-end: snapshot the small data set through two materializers sharing one server,
     * run SequentialEphemeralSnapshotTrie over the frontend view, and validate the result.
     */
    @Test(timeout=10000)
    public void test() throws Exception {
        final SimpleNameTrie<ValueNode<Optional<Long>>> data = createSmallTestData();
        final List<Component<?>> components = Modules.newServerAndClientComponents();
        final Injector injector = stopping(components, JacksonModule.create());
        final Callable<Void> callable = new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                // create a frontend and backend materializer
                final Serializers.ByteCodec<Object> codec = injector.getInstance(Key.get(new TypeLiteral<Serializers.ByteCodec<Object>>(){}));
                final int FRONT = 0;
                final int BACK = 1;
                // both materializers are backed by the same client connection
                final List<Materializer<StorageZNode<?>,Message.ServerResponse<?>>> materializers = Lists.newArrayListWithCapacity(BACK+1);
                for (int i=0; i<BACK+1; ++i) {
                    materializers.add(Materializer.<StorageZNode<?>, Message.ServerResponse<?>>fromHierarchy(SequentialEphemeralSnapshotTrieTestSchema.class, codec, injector.getInstance(Key.get(Component.class, Names.named("client"))).injector().getInstance(ConnectionClientExecutorService.Builder.class).getConnectionClientExecutor()));
                }
                snapshot(data, materializers.get(BACK), materializers.get(FRONT));
                final Pair<SimpleLabelTrie<SequentialNode<AbsoluteZNodePath>>,? extends Set<AbsoluteZNodePath>> result = SequentialEphemeralSnapshotTrie.create(SequentialEphemeralSnapshotTrieTestSchema.PATH, materializers.get(FRONT), logger).call().get();
                validate(data, materializers.get(FRONT).cache(), result);
                return null;
            }
        };
        callWithService(
                injector,
                callable);
    }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/vision/v1p3beta1/image_annotator.proto
//
// NOTE(review): this file is protoc output; any hand edits (including these comments)
// will be lost on the next regeneration of the proto bindings.

package com.google.cloud.vision.v1p3beta1;

/**
 *
 *
 * <pre>
 * Detected entity location information.
 * </pre>
 *
 * Protobuf type {@code google.cloud.vision.v1p3beta1.LocationInfo}
 */
public final class LocationInfo extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.LocationInfo)
    LocationInfoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use LocationInfo.newBuilder() to construct.
  private LocationInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private LocationInfo() {}

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags until EOF (tag 0); field 1 (tag 10) is the
  // lat_lng submessage, merged into any previously seen value; unknown fields are preserved.
  private LocationInfo(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.type.LatLng.Builder subBuilder = null;
              if (latLng_ != null) {
                subBuilder = latLng_.toBuilder();
              }
              latLng_ = input.readMessage(com.google.type.LatLng.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(latLng_);
                latLng_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownFieldProto3(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p3beta1_LocationInfo_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p3beta1_LocationInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vision.v1p3beta1.LocationInfo.class,
            com.google.cloud.vision.v1p3beta1.LocationInfo.Builder.class);
  }

  public static final int LAT_LNG_FIELD_NUMBER = 1;
  private com.google.type.LatLng latLng_;
  /**
   *
   *
   * <pre>
   * lat/long location coordinates.
   * </pre>
   *
   * <code>.google.type.LatLng lat_lng = 1;</code>
   */
  public boolean hasLatLng() {
    return latLng_ != null;
  }
  /**
   *
   *
   * <pre>
   * lat/long location coordinates.
   * </pre>
   *
   * <code>.google.type.LatLng lat_lng = 1;</code>
   */
  public com.google.type.LatLng getLatLng() {
    // Never returns null: falls back to the LatLng default instance when unset.
    return latLng_ == null ? com.google.type.LatLng.getDefaultInstance() : latLng_;
  }
  /**
   *
   *
   * <pre>
   * lat/long location coordinates.
   * </pre>
   *
   * <code>.google.type.LatLng lat_lng = 1;</code>
   */
  public com.google.type.LatLngOrBuilder getLatLngOrBuilder() {
    return getLatLng();
  }

  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (latLng_ != null) {
      output.writeMessage(1, getLatLng());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (latLng_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getLatLng());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vision.v1p3beta1.LocationInfo)) {
      return super.equals(obj);
    }
    com.google.cloud.vision.v1p3beta1.LocationInfo other =
        (com.google.cloud.vision.v1p3beta1.LocationInfo) obj;

    boolean result = true;
    result = result && (hasLatLng() == other.hasLatLng());
    if (hasLatLng()) {
      result = result && getLatLng().equals(other.getLatLng());
    }
    result = result && unknownFields.equals(other.unknownFields);
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasLatLng()) {
      hash = (37 * hash) + LAT_LNG_FIELD_NUMBER;
      hash = (53 * hash) + getLatLng().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points for every supported input representation.
  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.vision.v1p3beta1.LocationInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Detected entity location information.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vision.v1p3beta1.LocationInfo}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.LocationInfo)
      com.google.cloud.vision.v1p3beta1.LocationInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_LocationInfo_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_LocationInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vision.v1p3beta1.LocationInfo.class,
              com.google.cloud.vision.v1p3beta1.LocationInfo.Builder.class);
    }

    // Construct using com.google.cloud.vision.v1p3beta1.LocationInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (latLngBuilder_ == null) {
        latLng_ = null;
      } else {
        latLng_ = null;
        latLngBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_LocationInfo_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.LocationInfo getDefaultInstanceForType() {
      return com.google.cloud.vision.v1p3beta1.LocationInfo.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.LocationInfo build() {
      com.google.cloud.vision.v1p3beta1.LocationInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.LocationInfo buildPartial() {
      com.google.cloud.vision.v1p3beta1.LocationInfo result =
          new com.google.cloud.vision.v1p3beta1.LocationInfo(this);
      if (latLngBuilder_ == null) {
        result.latLng_ = latLng_;
      } else {
        result.latLng_ = latLngBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return (Builder) super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vision.v1p3beta1.LocationInfo) {
        return mergeFrom((com.google.cloud.vision.v1p3beta1.LocationInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.LocationInfo other) {
      if (other == com.google.cloud.vision.v1p3beta1.LocationInfo.getDefaultInstance())
        return this;
      if (other.hasLatLng()) {
        mergeLatLng(other.getLatLng());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.vision.v1p3beta1.LocationInfo parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was successfully parsed before rethrowing as an IOException.
        parsedMessage = (com.google.cloud.vision.v1p3beta1.LocationInfo) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.type.LatLng latLng_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.type.LatLng, com.google.type.LatLng.Builder, com.google.type.LatLngOrBuilder>
        latLngBuilder_;
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public boolean hasLatLng() {
      return latLngBuilder_ != null || latLng_ != null;
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public com.google.type.LatLng getLatLng() {
      if (latLngBuilder_ == null) {
        return latLng_ == null ? com.google.type.LatLng.getDefaultInstance() : latLng_;
      } else {
        return latLngBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public Builder setLatLng(com.google.type.LatLng value) {
      if (latLngBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        latLng_ = value;
        onChanged();
      } else {
        latLngBuilder_.setMessage(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public Builder setLatLng(com.google.type.LatLng.Builder builderForValue) {
      if (latLngBuilder_ == null) {
        latLng_ = builderForValue.build();
        onChanged();
      } else {
        latLngBuilder_.setMessage(builderForValue.build());
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public Builder mergeLatLng(com.google.type.LatLng value) {
      if (latLngBuilder_ == null) {
        if (latLng_ != null) {
          latLng_ = com.google.type.LatLng.newBuilder(latLng_).mergeFrom(value).buildPartial();
        } else {
          latLng_ = value;
        }
        onChanged();
      } else {
        latLngBuilder_.mergeFrom(value);
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public Builder clearLatLng() {
      if (latLngBuilder_ == null) {
        latLng_ = null;
        onChanged();
      } else {
        latLng_ = null;
        latLngBuilder_ = null;
      }

      return this;
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public com.google.type.LatLng.Builder getLatLngBuilder() {

      onChanged();
      return getLatLngFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    public com.google.type.LatLngOrBuilder getLatLngOrBuilder() {
      if (latLngBuilder_ != null) {
        return latLngBuilder_.getMessageOrBuilder();
      } else {
        return latLng_ == null ? com.google.type.LatLng.getDefaultInstance() : latLng_;
      }
    }
    /**
     *
     *
     * <pre>
     * lat/long location coordinates.
     * </pre>
     *
     * <code>.google.type.LatLng lat_lng = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.type.LatLng, com.google.type.LatLng.Builder, com.google.type.LatLngOrBuilder>
        getLatLngFieldBuilder() {
      // Lazily switches from plain-field storage to builder-backed storage.
      if (latLngBuilder_ == null) {
        latLngBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.type.LatLng,
                com.google.type.LatLng.Builder,
                com.google.type.LatLngOrBuilder>(getLatLng(), getParentForChildren(), isClean());
        latLng_ = null;
      }
      return latLngBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFieldsProto3(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.LocationInfo)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.LocationInfo)
  private static final com.google.cloud.vision.v1p3beta1.LocationInfo DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.LocationInfo();
  }

  public static com.google.cloud.vision.v1p3beta1.LocationInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<LocationInfo> PARSER =
      new com.google.protobuf.AbstractParser<LocationInfo>() {
        @java.lang.Override
        public LocationInfo parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new LocationInfo(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<LocationInfo> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<LocationInfo> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.vision.v1p3beta1.LocationInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
package com.actionbarsherlock.app;

import android.annotation.TargetApi;
import android.app.Activity;
import android.content.res.Configuration;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.View;
import android.view.Window;
import android.view.ViewGroup.LayoutParams;

import com.actionbarsherlock.ActionBarSherlock;
import com.actionbarsherlock.ActionBarSherlock.OnActionModeFinishedListener;
import com.actionbarsherlock.ActionBarSherlock.OnActionModeStartedListener;
import com.actionbarsherlock.ActionBarSherlock.OnCreatePanelMenuListener;
import com.actionbarsherlock.ActionBarSherlock.OnMenuItemSelectedListener;
import com.actionbarsherlock.ActionBarSherlock.OnPreparePanelListener;
import com.actionbarsherlock.view.ActionMode;
import com.actionbarsherlock.view.Menu;
import com.actionbarsherlock.view.MenuInflater;
import com.actionbarsherlock.view.MenuItem;

/**
 * Base {@link Activity} that hosts an action bar on pre-API-11 devices by
 * delegating virtually every framework callback to a lazily-created
 * {@link ActionBarSherlock} instance. Subclasses use the {@code *Support*} /
 * Sherlock variants ({@link #getSupportActionBar()},
 * {@link #onCreateOptionsMenu(Menu)}, …) instead of the native ones.
 *
 * <p>Pattern throughout this class: the native framework callback is
 * intercepted, forwarded to the Sherlock delegate, and only falls through to
 * {@code super} when the delegate declines to handle it.
 */
public abstract class SherlockActivity extends Activity implements OnCreatePanelMenuListener, OnPreparePanelListener, OnMenuItemSelectedListener, OnActionModeStartedListener, OnActionModeFinishedListener {
    // Lazily initialized delegate; access only via getSherlock().
    private ActionBarSherlock mSherlock;

    /**
     * Returns the Sherlock delegate, creating it on first use.
     * FLAG_DELEGATE tells the wrapper this Activity forwards its own
     * callbacks rather than letting Sherlock own the window outright.
     */
    protected final ActionBarSherlock getSherlock() {
        if (mSherlock == null) {
            mSherlock = ActionBarSherlock.wrap(this, ActionBarSherlock.FLAG_DELEGATE);
        }
        return mSherlock;
    }


    ///////////////////////////////////////////////////////////////////////////
    // Action bar and mode
    ///////////////////////////////////////////////////////////////////////////

    /** Sherlock replacement for {@code Activity.getActionBar()}. */
    public ActionBar getSupportActionBar() {
        return getSherlock().getActionBar();
    }

    /** Starts a Sherlock (not native) action mode for contextual actions. */
    public ActionMode startActionMode(ActionMode.Callback callback) {
        return getSherlock().startActionMode(callback);
    }

    /** No-op hook invoked when an action mode starts; subclasses may override. */
    public void onActionModeStarted(ActionMode mode) {}

    /** No-op hook invoked when an action mode finishes; subclasses may override. */
    public void onActionModeFinished(ActionMode mode) {}


    ///////////////////////////////////////////////////////////////////////////
    // General lifecycle/callback dispatching
    ///////////////////////////////////////////////////////////////////////////

    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        getSherlock().dispatchConfigurationChanged(newConfig);
    }

    protected void onPostResume() {
        super.onPostResume();
        getSherlock().dispatchPostResume();
    }

    // NOTE: for teardown callbacks (pause/stop/destroy) the delegate is
    // notified BEFORE super, so Sherlock can clean up while the Activity is
    // still fully alive; for setup callbacks the order is reversed.
    protected void onPause() {
        getSherlock().dispatchPause();
        super.onPause();
    }

    protected void onStop() {
        getSherlock().dispatchStop();
        super.onStop();
    }

    protected void onDestroy() {
        getSherlock().dispatchDestroy();
        super.onDestroy();
    }

    protected void onPostCreate(Bundle savedInstanceState) {
        getSherlock().dispatchPostCreate(savedInstanceState);
        super.onPostCreate(savedInstanceState);
    }

    protected void onTitleChanged(CharSequence title, int color) {
        getSherlock().dispatchTitleChanged(title, color);
        super.onTitleChanged(title, color);
    }

    // Sherlock gets first refusal on menu-open events; final so subclasses
    // use the Sherlock menu callbacks below instead.
    public final boolean onMenuOpened(int featureId, android.view.Menu menu) {
        if (getSherlock().dispatchMenuOpened(featureId, menu)) {
            return true;
        }
        return super.onMenuOpened(featureId, menu);
    }

    public void onPanelClosed(int featureId, android.view.Menu menu) {
        getSherlock().dispatchPanelClosed(featureId, menu);
        super.onPanelClosed(featureId, menu);
    }

    // Key events are offered to the delegate first (e.g. MENU key toggles
    // the overflow); unhandled events fall through to the framework.
    public boolean dispatchKeyEvent(KeyEvent event) {
        if (getSherlock().dispatchKeyEvent(event)) {
            return true;
        }
        return super.dispatchKeyEvent(event);
    }


    ///////////////////////////////////////////////////////////////////////////
    // Native menu handling
    ///////////////////////////////////////////////////////////////////////////

    /** Sherlock replacement for {@code Activity.getMenuInflater()}. */
    public MenuInflater getSupportMenuInflater() {
        return getSherlock().getMenuInflater();
    }

    public void invalidateOptionsMenu() {
        getSherlock().dispatchInvalidateOptionsMenu();
    }

    // Convenience alias matching the support-library naming; simply routes
    // through the Sherlock-aware invalidateOptionsMenu() above.
    @TargetApi(11)
    public void supportInvalidateOptionsMenu() {
        invalidateOptionsMenu();
    }

    // The NATIVE menu callbacks are sealed (final) and handed entirely to
    // the delegate, which re-dispatches them to the Sherlock-typed
    // callbacks further down. Subclasses must override those, not these.
    public final boolean onCreateOptionsMenu(android.view.Menu menu) {
        return getSherlock().dispatchCreateOptionsMenu(menu);
    }

    public final boolean onPrepareOptionsMenu(android.view.Menu menu) {
        return getSherlock().dispatchPrepareOptionsMenu(menu);
    }

    public final boolean onOptionsItemSelected(android.view.MenuItem item) {
        return getSherlock().dispatchOptionsItemSelected(item);
    }

    public void openOptionsMenu() {
        if (!getSherlock().dispatchOpenOptionsMenu()) {
            super.openOptionsMenu();
        }
    }

    public void closeOptionsMenu() {
        if (!getSherlock().dispatchCloseOptionsMenu()) {
            super.closeOptionsMenu();
        }
    }


    ///////////////////////////////////////////////////////////////////////////
    // Sherlock menu handling
    ///////////////////////////////////////////////////////////////////////////

    // Sherlock-side panel callbacks: only the options panel is routed to the
    // user-facing onCreate/onPrepare/onOptionsItemSelected(Menu) overloads;
    // any other panel feature is reported unhandled.
    public boolean onCreatePanelMenu(int featureId, Menu menu) {
        if (featureId == Window.FEATURE_OPTIONS_PANEL) {
            return onCreateOptionsMenu(menu);
        }
        return false;
    }

    /** Subclass hook (Sherlock menu type). Default: show the (empty) menu. */
    public boolean onCreateOptionsMenu(Menu menu) {
        return true;
    }

    public boolean onPreparePanel(int featureId, View view, Menu menu) {
        if (featureId == Window.FEATURE_OPTIONS_PANEL) {
            return onPrepareOptionsMenu(menu);
        }
        return false;
    }

    /** Subclass hook (Sherlock menu type). Default: allow the menu to show. */
    public boolean onPrepareOptionsMenu(Menu menu) {
        return true;
    }

    public boolean onMenuItemSelected(int featureId, MenuItem item) {
        if (featureId == Window.FEATURE_OPTIONS_PANEL) {
            return onOptionsItemSelected(item);
        }
        return false;
    }

    /** Subclass hook (Sherlock menu type). Default: item not handled. */
    public boolean onOptionsItemSelected(MenuItem item) {
        return false;
    }


    ///////////////////////////////////////////////////////////////////////////
    // Content
    ///////////////////////////////////////////////////////////////////////////

    // Content-view installation goes through the delegate so Sherlock can
    // wrap the hierarchy with its own action-bar decor.
    public void addContentView(View view, LayoutParams params) {
        getSherlock().addContentView(view, params);
    }

    public void setContentView(int layoutResId) {
        getSherlock().setContentView(layoutResId);
    }

    public void setContentView(View view, LayoutParams params) {
        getSherlock().setContentView(view, params);
    }

    public void setContentView(View view) {
        getSherlock().setContentView(view);
    }

    // Deliberate overload (long, not int): this must NOT override the native
    // Activity.requestWindowFeature(int), because feature requests here are
    // for Sherlock's Window.FEATURE_* constants and are routed to the
    // delegate instead of the framework window.
    public void requestWindowFeature(long featureId) {
        getSherlock().requestFeature((int)featureId);
    }


    ///////////////////////////////////////////////////////////////////////////
    // Progress Indication
    ///////////////////////////////////////////////////////////////////////////

    // Sherlock-aware replacements for the Activity setProgress* family.
    public void setSupportProgress(int progress) {
        getSherlock().setProgress(progress);
    }

    public void setSupportProgressBarIndeterminate(boolean indeterminate) {
        getSherlock().setProgressBarIndeterminate(indeterminate);
    }

    public void setSupportProgressBarIndeterminateVisibility(boolean visible) {
        getSherlock().setProgressBarIndeterminateVisibility(visible);
    }

    public void setSupportProgressBarVisibility(boolean visible) {
        getSherlock().setProgressBarVisibility(visible);
    }

    public void setSupportSecondaryProgress(int secondaryProgress) {
        getSherlock().setSecondaryProgress(secondaryProgress);
    }
}