repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
openegovplatform/OEPv2
oep-dossier-portlet/docroot/WEB-INF/service/org/oep/dossiermgt/model/DossierProcModel.java
13659
/** * Copyright (c) 2000-present Liferay, Inc. All rights reserved. * * This library is free software; you can redistribute it and/or modify it under * the terms of the GNU Lesser General Public License as published by the Free * Software Foundation; either version 2.1 of the License, or (at your option) * any later version. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS * FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more * details. */ package org.oep.dossiermgt.model; import com.liferay.portal.kernel.bean.AutoEscape; import com.liferay.portal.kernel.exception.SystemException; import com.liferay.portal.model.BaseModel; import com.liferay.portal.model.CacheModel; import com.liferay.portal.service.ServiceContext; import com.liferay.portlet.expando.model.ExpandoBridge; import java.io.Serializable; import java.util.Date; /** * The base model interface for the DossierProc service. Represents a row in the &quot;oep_dossiermgt_dossierproc&quot; database table, with each column mapped to a property of this class. * * <p> * This interface and its corresponding implementation {@link org.oep.dossiermgt.model.impl.DossierProcModelImpl} exist only as a container for the default property accessors generated by ServiceBuilder. Helper methods and all application logic should be put in {@link org.oep.dossiermgt.model.impl.DossierProcImpl}. * </p> * * @author trungdk * @see DossierProc * @see org.oep.dossiermgt.model.impl.DossierProcImpl * @see org.oep.dossiermgt.model.impl.DossierProcModelImpl * @generated */ public interface DossierProcModel extends BaseModel<DossierProc> { /* * NOTE FOR DEVELOPERS: * * Never modify or reference this interface directly. All methods that expect a dossier proc model instance should use the {@link DossierProc} interface instead. */ /** * Returns the primary key of this dossier proc. 
* * @return the primary key of this dossier proc */ public long getPrimaryKey(); /** * Sets the primary key of this dossier proc. * * @param primaryKey the primary key of this dossier proc */ public void setPrimaryKey(long primaryKey); /** * Returns the dossier proc ID of this dossier proc. * * @return the dossier proc ID of this dossier proc */ public long getDossierProcId(); /** * Sets the dossier proc ID of this dossier proc. * * @param dossierProcId the dossier proc ID of this dossier proc */ public void setDossierProcId(long dossierProcId); /** * Returns the user ID of this dossier proc. * * @return the user ID of this dossier proc */ public long getUserId(); /** * Sets the user ID of this dossier proc. * * @param userId the user ID of this dossier proc */ public void setUserId(long userId); /** * Returns the user uuid of this dossier proc. * * @return the user uuid of this dossier proc * @throws SystemException if a system exception occurred */ public String getUserUuid() throws SystemException; /** * Sets the user uuid of this dossier proc. * * @param userUuid the user uuid of this dossier proc */ public void setUserUuid(String userUuid); /** * Returns the group ID of this dossier proc. * * @return the group ID of this dossier proc */ public long getGroupId(); /** * Sets the group ID of this dossier proc. * * @param groupId the group ID of this dossier proc */ public void setGroupId(long groupId); /** * Returns the company ID of this dossier proc. * * @return the company ID of this dossier proc */ public long getCompanyId(); /** * Sets the company ID of this dossier proc. * * @param companyId the company ID of this dossier proc */ public void setCompanyId(long companyId); /** * Returns the create date of this dossier proc. * * @return the create date of this dossier proc */ public Date getCreateDate(); /** * Sets the create date of this dossier proc. 
* * @param createDate the create date of this dossier proc */ public void setCreateDate(Date createDate); /** * Returns the modified date of this dossier proc. * * @return the modified date of this dossier proc */ public Date getModifiedDate(); /** * Sets the modified date of this dossier proc. * * @param modifiedDate the modified date of this dossier proc */ public void setModifiedDate(Date modifiedDate); /** * Returns the dossier proc no of this dossier proc. * * @return the dossier proc no of this dossier proc */ @AutoEscape public String getDossierProcNo(); /** * Sets the dossier proc no of this dossier proc. * * @param dossierProcNo the dossier proc no of this dossier proc */ public void setDossierProcNo(String dossierProcNo); /** * Returns the name of this dossier proc. * * @return the name of this dossier proc */ @AutoEscape public String getName(); /** * Sets the name of this dossier proc. * * @param name the name of this dossier proc */ public void setName(String name); /** * Returns the en name of this dossier proc. * * @return the en name of this dossier proc */ @AutoEscape public String getEnName(); /** * Sets the en name of this dossier proc. * * @param enName the en name of this dossier proc */ public void setEnName(String enName); /** * Returns the short name of this dossier proc. * * @return the short name of this dossier proc */ @AutoEscape public String getShortName(); /** * Sets the short name of this dossier proc. * * @param shortName the short name of this dossier proc */ public void setShortName(String shortName); /** * Returns the process description of this dossier proc. * * @return the process description of this dossier proc */ @AutoEscape public String getProcessDescription(); /** * Sets the process description of this dossier proc. * * @param processDescription the process description of this dossier proc */ public void setProcessDescription(String processDescription); /** * Returns the method description of this dossier proc. 
* * @return the method description of this dossier proc */ @AutoEscape public String getMethodDescription(); /** * Sets the method description of this dossier proc. * * @param methodDescription the method description of this dossier proc */ public void setMethodDescription(String methodDescription); /** * Returns the dossier description of this dossier proc. * * @return the dossier description of this dossier proc */ @AutoEscape public String getDossierDescription(); /** * Sets the dossier description of this dossier proc. * * @param dossierDescription the dossier description of this dossier proc */ public void setDossierDescription(String dossierDescription); /** * Returns the condition description of this dossier proc. * * @return the condition description of this dossier proc */ @AutoEscape public String getConditionDescription(); /** * Sets the condition description of this dossier proc. * * @param conditionDescription the condition description of this dossier proc */ public void setConditionDescription(String conditionDescription); /** * Returns the duration description of this dossier proc. * * @return the duration description of this dossier proc */ @AutoEscape public String getDurationDescription(); /** * Sets the duration description of this dossier proc. * * @param durationDescription the duration description of this dossier proc */ public void setDurationDescription(String durationDescription); /** * Returns the actors description of this dossier proc. * * @return the actors description of this dossier proc */ @AutoEscape public String getActorsDescription(); /** * Sets the actors description of this dossier proc. * * @param actorsDescription the actors description of this dossier proc */ public void setActorsDescription(String actorsDescription); /** * Returns the results description of this dossier proc. 
* * @return the results description of this dossier proc */ @AutoEscape public String getResultsDescription(); /** * Sets the results description of this dossier proc. * * @param resultsDescription the results description of this dossier proc */ public void setResultsDescription(String resultsDescription); /** * Returns the records description of this dossier proc. * * @return the records description of this dossier proc */ @AutoEscape public String getRecordsDescription(); /** * Sets the records description of this dossier proc. * * @param recordsDescription the records description of this dossier proc */ public void setRecordsDescription(String recordsDescription); /** * Returns the fee description of this dossier proc. * * @return the fee description of this dossier proc */ @AutoEscape public String getFeeDescription(); /** * Sets the fee description of this dossier proc. * * @param feeDescription the fee description of this dossier proc */ public void setFeeDescription(String feeDescription); /** * Returns the instructions description of this dossier proc. * * @return the instructions description of this dossier proc */ @AutoEscape public String getInstructionsDescription(); /** * Sets the instructions description of this dossier proc. * * @param instructionsDescription the instructions description of this dossier proc */ public void setInstructionsDescription(String instructionsDescription); /** * Returns the administration no of this dossier proc. * * @return the administration no of this dossier proc */ @AutoEscape public String getAdministrationNo(); /** * Sets the administration no of this dossier proc. * * @param administrationNo the administration no of this dossier proc */ public void setAdministrationNo(String administrationNo); /** * Returns the administration name of this dossier proc. * * @return the administration name of this dossier proc */ @AutoEscape public String getAdministrationName(); /** * Sets the administration name of this dossier proc. 
* * @param administrationName the administration name of this dossier proc */ public void setAdministrationName(String administrationName); /** * Returns the domain no of this dossier proc. * * @return the domain no of this dossier proc */ @AutoEscape public String getDomainNo(); /** * Sets the domain no of this dossier proc. * * @param domainNo the domain no of this dossier proc */ public void setDomainNo(String domainNo); /** * Returns the domain name of this dossier proc. * * @return the domain name of this dossier proc */ @AutoEscape public String getDomainName(); /** * Sets the domain name of this dossier proc. * * @param domainName the domain name of this dossier proc */ public void setDomainName(String domainName); /** * Returns the for citizen of this dossier proc. * * @return the for citizen of this dossier proc */ public int getForCitizen(); /** * Sets the for citizen of this dossier proc. * * @param forCitizen the for citizen of this dossier proc */ public void setForCitizen(int forCitizen); /** * Returns the for business of this dossier proc. * * @return the for business of this dossier proc */ public int getForBusiness(); /** * Sets the for business of this dossier proc. * * @param forBusiness the for business of this dossier proc */ public void setForBusiness(int forBusiness); /** * Returns the effect date of this dossier proc. * * @return the effect date of this dossier proc */ public Date getEffectDate(); /** * Sets the effect date of this dossier proc. * * @param effectDate the effect date of this dossier proc */ public void setEffectDate(Date effectDate); /** * Returns the expire date of this dossier proc. * * @return the expire date of this dossier proc */ public Date getExpireDate(); /** * Sets the expire date of this dossier proc. * * @param expireDate the expire date of this dossier proc */ public void setExpireDate(Date expireDate); /** * Returns the status active of this dossier proc. 
* * @return the status active of this dossier proc */ public int getStatusActive(); /** * Sets the status active of this dossier proc. * * @param statusActive the status active of this dossier proc */ public void setStatusActive(int statusActive); @Override public boolean isNew(); @Override public void setNew(boolean n); @Override public boolean isCachedModel(); @Override public void setCachedModel(boolean cachedModel); @Override public boolean isEscapedModel(); @Override public Serializable getPrimaryKeyObj(); @Override public void setPrimaryKeyObj(Serializable primaryKeyObj); @Override public ExpandoBridge getExpandoBridge(); @Override public void setExpandoBridgeAttributes(BaseModel<?> baseModel); @Override public void setExpandoBridgeAttributes(ExpandoBridge expandoBridge); @Override public void setExpandoBridgeAttributes(ServiceContext serviceContext); @Override public Object clone(); @Override public int compareTo(DossierProc dossierProc); @Override public int hashCode(); @Override public CacheModel<DossierProc> toCacheModel(); @Override public DossierProc toEscapedModel(); @Override public DossierProc toUnescapedModel(); @Override public String toString(); @Override public String toXmlString(); }
apache-2.0
zhangjunfang/jstorm-0.9.6.3-
jstorm-client/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java
3786
package storm.trident.operation.impl;

import backtype.storm.tuple.Fields;
import java.util.List;
import java.util.Map;
import storm.trident.operation.Aggregator;
import storm.trident.operation.TridentCollector;
import storm.trident.operation.TridentOperationContext;
import storm.trident.tuple.ComboList;
import storm.trident.tuple.TridentTuple;
import storm.trident.tuple.TridentTupleView.ProjectionFactory;

/**
 * Composite aggregator that drives several child {@link Aggregator}s over the
 * same batch. Each child sees only its own projection of the incoming tuple.
 * On completion, when more than one child is chained, the tuples captured from
 * every child are combined as a cross-join and emitted through the
 * {@link ComboList.Factory}; with a single child, its tuples were already
 * emitted directly.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class ChainedAggregatorImpl implements Aggregator<ChainedResult> {

    private static final long serialVersionUID = 6229346978859771699L;

    Aggregator[] _aggs;
    ProjectionFactory[] _inputFactories;
    ComboList.Factory _fact;
    Fields[] _inputFields;

    /**
     * @param aggs        the child aggregators to chain
     * @param inputFields one field projection per child aggregator
     * @param fact        factory combining the children's outputs into one tuple
     * @throws IllegalArgumentException if the two arrays differ in length
     */
    public ChainedAggregatorImpl(Aggregator[] aggs, Fields[] inputFields, ComboList.Factory fact) {
        _aggs = aggs;
        _inputFields = inputFields;
        _fact = fact;
        if (_aggs.length != _inputFields.length) {
            throw new IllegalArgumentException("Require input fields for each aggregator");
        }
    }

    @Override
    public void prepare(Map conf, TridentOperationContext context) {
        _inputFactories = new ProjectionFactory[_inputFields.length];
        for (int idx = 0; idx < _inputFields.length; idx++) {
            // Each child is prepared with a context scoped to its own projection.
            _inputFactories[idx] = context.makeProjectionFactory(_inputFields[idx]);
            _aggs[idx].prepare(conf, new TridentOperationContext(context, _inputFactories[idx]));
        }
    }

    @Override
    public ChainedResult init(Object batchId, TridentCollector collector) {
        ChainedResult state = new ChainedResult(collector, _aggs.length);
        for (int idx = 0; idx < _aggs.length; idx++) {
            state.objs[idx] = _aggs[idx].init(batchId, state.collectors[idx]);
        }
        return state;
    }

    @Override
    public void aggregate(ChainedResult val, TridentTuple tuple, TridentCollector collector) {
        val.setFollowThroughCollector(collector);
        for (int idx = 0; idx < _aggs.length; idx++) {
            // Project the incoming tuple down to this child's input fields.
            TridentTuple projected = _inputFactories[idx].create(tuple);
            _aggs[idx].aggregate(val.objs[idx], projected, val.collectors[idx]);
        }
    }

    @Override
    public void complete(ChainedResult val, TridentCollector collector) {
        val.setFollowThroughCollector(collector);
        for (int idx = 0; idx < _aggs.length; idx++) {
            _aggs[idx].complete(val.objs[idx], val.collectors[idx]);
        }
        if (_aggs.length > 1) { // otherwise, tuples were emitted directly
            // Java arrays are zero-initialized, so this starts at the first
            // combination of captured tuples.
            int[] cursor = new int[val.collectors.length];
            // Emit the cross-join of everything each child captured.
            do {
                List[] parts = new List[_aggs.length];
                for (int idx = 0; idx < _aggs.length; idx++) {
                    CaptureCollector capture = (CaptureCollector) val.collectors[idx];
                    parts[idx] = capture.captured.get(cursor[idx]);
                }
                collector.emit(_fact.create(parts));
            } while (increment(val.collectors, cursor, cursor.length - 1));
        }
    }

    /**
     * Advances {@code indices} odometer-style over the captured tuple lists,
     * rightmost position first. Returns false once every combination has been
     * visited.
     */
    private boolean increment(TridentCollector[] collectors, int[] indices, int pos) {
        if (pos == -1) {
            return false;
        }
        indices[pos]++;
        CaptureCollector capture = (CaptureCollector) collectors[pos];
        if (indices[pos] >= capture.captured.size()) {
            indices[pos] = 0;
            return increment(collectors, indices, pos - 1);
        }
        return true;
    }

    @Override
    public void cleanup() {
        for (int idx = 0; idx < _aggs.length; idx++) {
            _aggs[idx].cleanup();
        }
    }
}
apache-2.0
diegoRodriguezAguila/Cobranza.Elfec.Mobile
cobranzaElfecMobile/src/main/java/com/elfec/cobranza/presenter/adapter_interfaces/ICollectionBaseAdapter.java
1921
package com.elfec.cobranza.presenter.adapter_interfaces;

import android.content.Context;
import android.graphics.drawable.Drawable;
import android.widget.ArrayAdapter;

import com.elfec.cobranza.model.CoopReceipt;
import com.elfec.cobranza.presenter.CollectionActionPresenter;
import com.elfec.cobranza.presenter.views.ICollectionActionView;

import java.util.List;

/**
 * Provides an abstraction of the adapter used for collection (billing) actions.
 * @author drodriguez
 *
 */
public interface ICollectionBaseAdapter {
	/**
	 * Gets the title for the view
	 * @return title
	 */
	public String getActionTitle();
	/**
	 * Gets the drawable id for the view's title
	 * @return drawable id
	 */
	public int getTitleDrawableId();
	/**
	 * Gets the text of the action button
	 * @return button text
	 */
	public String getButtonText();
	/**
	 * Gets the drawable for the button
	 * @return drawable
	 */
	public Drawable getButtonDrawable();
	/**
	 * Gets the title that will be used for the receipt list
	 * @return title
	 */
	public String getReceiptListTitle();
	/**
	 * Gets the id of the string shown in the title of
	 * the action's errors
	 * @return string id
	 */
	public int getActionErrorsTitleId();
	/**
	 * Gets the id of the success message string
	 * @return string id
	 */
	public int getActionSuccessMsgId();
	/**
	 * Gets the appropriate presenter for the collection action
	 */
	public CollectionActionPresenter getCollectionPresenter(ICollectionActionView view);
	/**
	 * Gets the adapter for the receipt (invoice) list
	 * @param receipts receipts to display
	 * @return adapter backed by the given receipts
	 */
	public ArrayAdapter<CoopReceipt> getReceiptAdapter(List<CoopReceipt> receipts);
	/**
	 * Gets the context
	 * @return the Android context
	 */
	public Context getContext();
	/**
	 * Indicates whether or not the pick-a-printer menu option has to be shown
	 * @return true if the option must be shown
	 */
	public boolean hasToShowPickPrinter();
}
apache-2.0
servioticy/servioticy-api-commons
src/main/java/com/servioticy/api/commons/elasticsearch/SearchCriteria.java
5680
package com.servioticy.api.commons.elasticsearch;

import java.io.IOException;
import java.lang.reflect.Field;

import org.apache.log4j.Logger;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.query.AndFilterBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.GeoBoundingBoxFilterBuilder;
import org.elasticsearch.index.query.GeoDistanceFilterBuilder;
import org.elasticsearch.index.query.RangeFilterBuilder;
import org.elasticsearch.index.query.TermFilterBuilder;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.servioticy.api.commons.data.Group;

/**
 * Search criteria for querying stream updates in Elasticsearch. Instances are
 * populated from JSON by Jackson (hence the public mutable fields) and turned
 * into an ES filter string by {@link #buildFilter()}.
 */
public class SearchCriteria {

    private static Logger LOG = org.apache.log4j.Logger.getLogger(SearchCriteria.class);
    private static ObjectMapper mapper = new ObjectMapper();

    // RANGES ***********************************
    public boolean timerange = false;
    public boolean numericrange = false;
    // GENERIC range bounds, shared by the time range and the numeric range
    public double rangefrom = Double.MIN_VALUE;
    public double rangeto = Double.MAX_VALUE;
    // ONLY FOR GENERICRANGE: document field the numeric range applies to
    public String numericrangefield;

    // LIMIT ***********************************
    public boolean limit = false;
    public int limitcount;

    // GEODISTANCE *****************************
    public boolean geodistance = false;
    // GENERIC center point
    public double pointlat;
    public double pointlon;
    // DISTANCE radius; unit string parsed by DistanceUnit (default km)
    public double geodistancevalue;
    public String geodistanceunit = "km";

    // GEOSHAPE ********************************
    public boolean geoboundingbox = false;
    // BOX corners
    public double geoboxupperleftlat;
    public double geoboxupperleftlon;
    public double geoboxbottomrightlat;
    public double geoboxbottomrightlon;

    // MATCH ********************************
    public boolean match = false;
    public String matchfield;
    public String matchstring;

    /**
     * Deserializes a SearchCriteria from its JSON representation.
     *
     * @param searchCriteriaJson JSON document matching this class's fields
     * @return the parsed criteria, or null if the JSON could not be parsed
     *         (the failure is logged)
     */
    public static SearchCriteria buildFromJson(String searchCriteriaJson) {
        // FIX: was System.out.println — route diagnostics through the logger.
        LOG.debug("Building from: --" + searchCriteriaJson + "--");
        try {
            return mapper.readValue(searchCriteriaJson, SearchCriteria.class);
        } catch (IOException e) {
            // FIX: was e.printStackTrace() to stderr; log with the cause instead.
            // JsonProcessingException is an IOException, so one catch covers both
            // of the previously separate handlers.
            LOG.error("Unable to parse search criteria: " + searchCriteriaJson, e);
        }
        return null;
    }

    /**
     * Returns true if the populated flags form a usable combination.
     * NOTE(review): the middle clause (timerange && numericrange && ...) is
     * subsumed by the first two disjuncts and never changes the outcome;
     * kept as-is to preserve the original contract.
     */
    public boolean valid() {
        return (timerange
                || (numericrange && numericrangefield != null)
                || (timerange && numericrange && numericrangefield != null
                        && !numericrangefield.contains("couchbaseDocument.doc.lastUpdate"))
                || geodistance ^ geoboundingbox)
                || (match && (matchfield != null && matchstring != null));
    }

    /**
     * Builds the Elasticsearch filter (an AND of every enabled criterion) as a
     * JSON string.
     *
     * @return the filter string, or null when {@link #valid()} is false
     */
    public String buildFilter() {
        if (!valid()) {
            return null;
        }

        AndFilterBuilder global = FilterBuilders.andFilter();

        if (timerange) {
            RangeFilterBuilder rangeFilter = FilterBuilders.rangeFilter("doc.lastUpdate")
                    .from((long) rangefrom).to((long) rangeto)
                    .includeLower(true).includeUpper(true);
            global.add(rangeFilter);
        }

        if (numericrange) {
            RangeFilterBuilder numericrangeFilter = FilterBuilders.rangeFilter("doc." + numericrangefield)
                    .from(rangefrom).includeLower(true)
                    .to(rangeto).includeUpper(true);
            global.add(numericrangeFilter);
        }

        if (geodistance) {
            GeoDistanceFilterBuilder geodistanceFilter =
                    FilterBuilders.geoDistanceFilter("doc.channels.location.current-value")
                            .distance(geodistancevalue, DistanceUnit.fromString(geodistanceunit))
                            .point(pointlat, pointlon);
            global.add(geodistanceFilter);
        }

        if (geoboundingbox) {
            GeoBoundingBoxFilterBuilder geodbboxFilter =
                    FilterBuilders.geoBoundingBoxFilter("doc.channels.location.current-value")
                            .topLeft(geoboxupperleftlat, geoboxupperleftlon)
                            .bottomRight(geoboxbottomrightlat, geoboxbottomrightlon);
            global.add(geodbboxFilter);
        }

        if (match) {
            TermFilterBuilder matchFilter = FilterBuilders.termFilter("doc." + matchfield, matchstring);
            global.add(matchFilter);
        }

        // FIX: dropped the redundant StringBuilder that only wrapped this call.
        return global.toString();
    }

    /** Reflective dump of every declared field, one "name: value" per line. */
    public String toString() {
        StringBuilder res = new StringBuilder();
        for (Field field : this.getClass().getDeclaredFields()) {
            try {
                res.append(field.getName() + ": " + field.get(this) + "\n");
            } catch (IllegalArgumentException e) {
                LOG.error(e);
            } catch (IllegalAccessException e) {
                LOG.error(e);
            }
        }
        return res.toString();
    }
}
apache-2.0
robertoschwald/jasig-cas-examples-robertoschwald
cas-server-support-webservice/src/main/java/com/symentis/cas/ws/sample/endpoint/ExampleAuthenticationEndpoint.java
2217
/** * Copyright 2014 symentis GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.symentis.cas.ws.sample.endpoint; import com.symentis.cas.ws.sample.client.ExampleWsClient; import com.symentis.cas.ws.samples.auth.schema.AuthResponse; import org.apache.commons.lang.StringUtils; import com.symentis.cas.ws.samples.auth.schema.AuthRequest; import com.symentis.cas.ws.samples.auth.schema.ObjectFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.ws.server.endpoint.annotation.Endpoint; import org.springframework.ws.server.endpoint.annotation.PayloadRoot; import org.springframework.ws.server.endpoint.annotation.RequestPayload; import org.springframework.ws.server.endpoint.annotation.ResponsePayload; /** * Simple Spring WS Soap endpoint, just to test the WebserviceAuthenticationHandler with ExampleWsClient against. 
*/ @Endpoint public class ExampleAuthenticationEndpoint { private static final Logger log = LoggerFactory.getLogger(ExampleAuthenticationEndpoint.class); @PayloadRoot(namespace = "http://roos.cas.samples.ws.org/auth", localPart = "authRequest") @ResponsePayload public AuthResponse authenticate(@RequestPayload AuthRequest request) throws Exception { log.warn("Dummy-Authenticating user with any password"); AuthResponse response = new ObjectFactory().createAuthResponse(); if (StringUtils.isBlank(request.getNetid()) || StringUtils.isBlank(request.getPassword())){ throw new Exception("netid and password mandatory."); } // attributes response.setNetid(request.getNetid()); response.setLastname("TestLastName"); response.setFirstname("TestFirstName"); return response; } }
apache-2.0
googleads/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202111/CustomCriteriaNode.java
3499
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * CustomCriteriaNode.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.admanager.axis.v202111;


/**
 * A {@link CustomCriteriaNode} is a node in the custom targeting
 * tree. A custom
 *             criteria node can either be a {@link CustomCriteriaSet}
 * (a non-leaf node) or
 *             a {@link CustomCriteria} (a leaf node). The custom criteria
 * targeting tree is
 *             subject to the rules defined on {@link Targeting#customTargeting}.
 */
public abstract class CustomCriteriaNode  implements java.io.Serializable {
    public CustomCriteriaNode() {
    }

    @Override
    public String toString() {
        // Base class has no fields of its own, so this renders just the
        // runtime class name; subclasses rely on the same helper.
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .toString();
    }

    // Axis-generated cyclic-reference guard: remembers the object currently
    // being compared so that mutually-referencing beans do not recurse forever.
    // NOTE: this makes equals() non-reentrant per instance; the method is
    // synchronized to serialize concurrent callers.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof CustomCriteriaNode)) return false;
        CustomCriteriaNode other = (CustomCriteriaNode) obj;
        // NOTE(review): unreachable — a null obj already failed the
        // instanceof test above. Kept as emitted by WSDL2Java.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already inside a comparison with some object: equal only if it
            // is the very same reference (cycle detected).
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        // No fields to compare at this level; any two instances of the same
        // hierarchy compare equal here (subclasses add their own checks).
        _equals = true;
        __equalsCalc = null;
        return _equals;
    }

    // Companion guard for hashCode(): while a hash is being computed, nested
    // (cyclic) calls return 0 instead of recursing.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how Axis (de)serializes this type to/from XML.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(CustomCriteriaNode.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202111", "CustomCriteriaNode"));
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
zuevasasha78/Stady-JavaForTester
addressbook-selenium-tests/src/com/example/tests/GroupRemovalTests.java
838
package com.example.tests;

import static org.junit.Assert.assertThat;
import static org.hamcrest.Matchers.*;

import java.util.Random;

import org.testng.annotations.Test;

import com.example.utils.SortedListOf;

/**
 * Verifies that deleting a randomly chosen group removes exactly that group
 * from the list. Assumes at least one group exists before the test runs —
 * TODO confirm a fixture guarantees this.
 */
public class GroupRemovalTests extends TestBase {

  @Test
  public void deleteSomeGroup() {
    // save older state
    SortedListOf<GroupData> oldList = app.getGroupHelper().getGroups();
    Random rnd = new Random();
    // FIX: nextInt(size) yields a valid index in [0, size). The previous
    // nextInt(size - 1) could never select the last group and threw
    // IllegalArgumentException when exactly one group existed.
    int index = rnd.nextInt(oldList.size());
    // actions
    app.getGroupHelper().deleteGroup(index);
    // save new state
    SortedListOf<GroupData> newList = app.getGroupHelper().getGroups();
    // compare state: the new list must equal the old one minus the deleted entry
    assertThat(newList, equalTo(oldList.without(index)));
  }
}
apache-2.0
hillbw/exi-test
java/esiaisparser/src/main/java/nl/esi/metis/aisparser/HandleInvalidVDMMessage.java
152
package nl.esi.metis.aisparser;

/**
 * Callback contract for reporting AIS VDM messages that failed validation.
 * Implementations decide how an invalid message is handled (for example
 * logged, counted, or discarded).
 */
public interface HandleInvalidVDMMessage {

    /**
     * Invoked by the parser for each VDM message found to be invalid.
     *
     * @param invalidVDMMessage the message that failed validation
     */
    public void handleInvalidVDMMessage (VDMMessage invalidVDMMessage);
}
apache-2.0
freeVM/freeVM
enhanced/archive/classlib/java6/modules/regex/src/main/java/java/util/regex/Lexer.java
41583
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @author Nikolay A. Kuznetsov
 * @version $Revision: 1.21.2.2 $
 */
package java.util.regex;

import java.util.MissingResourceException;

import org.apache.harmony.regex.internal.nls.Messages;

/**
 * The purpose of this class is to break given pattern into RE tokens;
 *
 * The lexer keeps a two-token window over the pattern: the current token
 * (ch/curST) and one token of lookahead (lookAhead/lookAheadST).  Special
 * characters are encoded as ints with high bits set so they cannot collide
 * with plain codepoints (which are always >= 0).
 *
 * @author Nikolay A. Kuznetsov
 * @version $Revision: 1.21.2.2 $
 */
class Lexer {

    // Token codes for metacharacters; high bits distinguish them from
    // literal codepoints, low bits keep the originating character.
    public static final int CHAR_DOLLAR = 0xe0000000 | '$';

    public static final int CHAR_RIGHT_PARENTHESIS = 0xe0000000 | ')';

    public static final int CHAR_LEFT_SQUARE_BRACKET = 0xe0000000 | '[';

    public static final int CHAR_RIGHT_SQUARE_BRACKET = 0xe0000000 | ']';

    public static final int CHAR_CARET = 0xe0000000 | '^';

    public static final int CHAR_VERTICAL_BAR = 0xe0000000 | '|';

    public static final int CHAR_AMPERSAND = 0xe0000000 | '&';

    public static final int CHAR_HYPHEN = 0xe0000000 | '-';

    public static final int CHAR_DOT = 0xe0000000 | '.';

    // Quantifier greediness modifiers, OR-ed onto the quantifier character.
    public static final int QMOD_GREEDY = 0xe0000000;

    public static final int QMOD_RELUCTANT = 0xc0000000;

    public static final int QMOD_POSSESSIVE = 0x80000000;

    public static final int QUANT_STAR = QMOD_GREEDY | '*';

    public static final int QUANT_STAR_P = QMOD_POSSESSIVE | '*';

    public static final int QUANT_STAR_R = QMOD_RELUCTANT | '*';

    public static final int QUANT_PLUS = QMOD_GREEDY | '+';

    public static final int QUANT_PLUS_P = QMOD_POSSESSIVE | '+';

    public static final int QUANT_PLUS_R = QMOD_RELUCTANT | '+';

    public static final int QUANT_ALT = QMOD_GREEDY | '?';

    public static final int QUANT_ALT_P = QMOD_POSSESSIVE | '?';

    public static final int QUANT_ALT_R = QMOD_RELUCTANT | '?';

    public static final int QUANT_COMP = QMOD_GREEDY | '{';

    public static final int QUANT_COMP_P = QMOD_POSSESSIVE | '{';

    public static final int QUANT_COMP_R = QMOD_RELUCTANT | '{';

    // Group-opening token variants (capturing, non-capturing, lookaround...).
    public static final int CHAR_LEFT_PARENTHESIS = 0x80000000 | '(';

    public static final int CHAR_NONCAP_GROUP = 0xc0000000 | '(';

    public static final int CHAR_POS_LOOKAHEAD = 0xe0000000 | '(';

    public static final int CHAR_NEG_LOOKAHEAD = 0xf0000000 | '(';

    public static final int CHAR_POS_LOOKBEHIND = 0xf8000000 | '(';

    public static final int CHAR_NEG_LOOKBEHIND = 0xfc000000 | '(';

    public static final int CHAR_ATOMIC_GROUP = 0xfe000000 | '(';

    public static final int CHAR_FLAGS = 0xff000000 | '(';

    // Anchor/boundary token codes (\A \b \B \G \z \Z).
    public static final int CHAR_START_OF_INPUT = 0x80000000 | 'A';

    public static final int CHAR_WORD_BOUND = 0x80000000 | 'b';

    public static final int CHAR_NONWORD_BOUND = 0x80000000 | 'B';

    public static final int CHAR_PREVIOUS_MATCH = 0x80000000 | 'G';

    public static final int CHAR_END_OF_INPUT = 0x80000000 | 'z';

    public static final int CHAR_END_OF_LINE = 0x80000000 | 'Z';

    // Lexer interpretation modes: normal pattern, character-class range,
    // and \Q...\E literal-escape mode.
    public static final int MODE_PATTERN = 1 << 0;

    public static final int MODE_RANGE = 1 << 1;

    public static final int MODE_ESCAPE = 1 << 2;

    //maximum length of decomposition
    static final int MAX_DECOMPOSITION_LENGTH = 4;

    /*
     * maximum length of Hangul decomposition
     * note that MAX_HANGUL_DECOMPOSITION_LENGTH <= MAX_DECOMPOSITION_LENGTH
     */
    static final int MAX_HANGUL_DECOMPOSITION_LENGTH = 3;

    /*
     * Following constants are needed for Hangul canonical decomposition.
     * Hangul decomposition algorithm and constants are taken according
     * to description at http://www.unicode.org/versions/Unicode4.0.0/ch03.pdf
     * "3.12 Conjoining Jamo Behavior"
     */
    static final int SBase = 0xAC00;

    static final int LBase = 0x1100;

    static final int VBase = 0x1161;

    static final int TBase = 0x11A7;

    static final int SCount = 11172;

    static final int LCount = 19;

    static final int VCount = 21;

    static final int TCount = 28;

    static final int NCount = 588;

    //table that contains canonical decomposition mappings
    private static IntArrHash decompTable = null;

    //table that contains canonical combining classes
    private static IntHash canonClassesTable = null;

    private static int canonClassesTableSize;

    /*
     * Table that contains information about Unicode codepoints with
     * single codepoint decomposition
     */
    private static IntHash singleDecompTable = null;

    private static int singleDecompTableSize;

    // Pattern characters, padded with two trailing zero chars used as
    // end-of-input sentinels by the scanning code.
    private char[] pattern = null;

    private int flags = 0;

    private int mode = 1;

    // when in literal mode, this field will save the previous one
    private int saved_mode = 0;

    // previous char read
    private int lookBack;

    //current character read
    private int ch;

    //next character
    private int lookAhead;

    //index of last char in pattern plus one
    private int patternFullLength = 0;

    // cur special token
    private SpecialToken curST = null;

    // next special token
    private SpecialToken lookAheadST = null;

    // cur char being processed
    private int index = 0;

    // previous non-whitespace character index;
    private int prevNW = 0;

    // cur token start index
    private int curToc = 0;

    // look ahead token index
    private int lookAheadToc = 0;

    // original string representing pattern
    private String orig = null;

    /**
     * Creates a lexer over the given pattern.  LITERAL patterns are quoted
     * and CANON_EQ patterns are canonically normalized before scanning.
     */
    public Lexer(String pattern, int flags) {
        orig = pattern;
        if ((flags & Pattern.LITERAL) > 0) {
            pattern = Pattern.quote(pattern);
        } else if ((flags & Pattern.CANON_EQ) > 0) {
            pattern = Lexer.normalize(pattern);
        }

        this.pattern = new char[pattern.length() + 2];
        System.arraycopy(pattern.toCharArray(), 0, this.pattern, 0,
                pattern.length());
        this.pattern[this.pattern.length - 1] = 0;
        this.pattern[this.pattern.length - 2] = 0;
        patternFullLength = this.pattern.length;
        this.flags = flags;
        // read first two tokens;
        movePointer();
        movePointer();
    }

    /**
     * Returns current character w/o reading next one; if there are no more
     * characters returns 0;
     *
     * @return current character;
     */
    public int peek() {
        return ch;
    }

    /**
     * Set the Lexer to PATTERN or RANGE mode; Lexer interpret character two
     * different ways in parser or range modes.
     *
     * @param mode
     *            Lexer.PATTERN or Lexer.RANGE
     */
    public void setMode(int mode) {
        if (mode > 0 && mode < 3) {
            this.mode = mode;
        }

        // switching back to pattern mode invalidates the lookahead token,
        // which was scanned under range-mode rules
        if (mode == Lexer.MODE_PATTERN) {
            reread();
        }
    }

    /**
     * Restores flags for Lexer
     *
     * @param flags
     */
    public void restoreFlags(int flags) {
        this.flags = flags;
        lookAhead = ch;
        lookAheadST = curST;

        //curToc is an index of closing bracket )
        index = curToc + 1;
        lookAheadToc = curToc;
        movePointer();
    }

    /** Returns the current special token (quantifier/char-class) if any. */
    public SpecialToken peekSpecial() {
        return curST;
    }

    /**
     * Returns true, if current token is special, i.e. quantifier, or other
     * compound token.
     *
     * @return - true if current token is special, false otherwise.
     */
    public boolean isSpecial() {
        return curST != null;
    }

    /** True if the current special token is a quantifier. */
    public boolean isQuantifier() {
        return isSpecial() && curST.getType() == SpecialToken.TOK_QUANTIFIER;
    }

    /** True if the lookahead token is a special token. */
    public boolean isNextSpecial() {
        return lookAheadST != null;
    }

    /**
     * Returns current character and moves string index to the next one;
     */
    public int next() {
        movePointer();
        return lookBack;
    }

    /**
     * Returns current special token and moves string index to the next one;
     */
    public SpecialToken nextSpecial() {
        SpecialToken res = curST;
        movePointer();
        return res;
    }

    /**
     * Returns nest symbol read.
     */
    public int lookAhead() {
        return lookAhead;
    }

    /**
     * Returns previous character.
     */
    public int back() {
        return lookBack;
    }

    /**
     * Normalize given expression.
     *
     * @param input - expression to normalize
     * @return normalized expression.
     */
    static String normalize(String input) {
        char[] inputChars = input.toCharArray();
        int inputLength = inputChars.length;
        int resCodePointsIndex = 0;
        int inputCodePointsIndex = 0;
        int decompHangulIndex = 0;

        //codePoints of input
        int[] inputCodePoints = new int[inputLength];

        //result of canonical decomposition of input
        int[] resCodePoints = new int[inputLength * MAX_DECOMPOSITION_LENGTH];

        //current symbol's codepoint
        int ch;

        //current symbol's decomposition
        int[] decomp;

        //result of canonical and Hangul decomposition of input
        int[] decompHangul;

        //result of canonical decomposition of input in UTF-16 encoding
        StringBuffer result = new StringBuffer();

        decompTable = HashDecompositions.getHashDecompositions();
        canonClassesTable = CanClasses.getHashCanClasses();
        canonClassesTableSize = canonClassesTable.size;
        singleDecompTable = SingleDecompositions.getHashSingleDecompositions();
        singleDecompTableSize = singleDecompTable.size;

        for (int i = 0; i < inputLength; i += Character.charCount(ch)) {
            ch = Character.codePointAt(inputChars, i);
            inputCodePoints[inputCodePointsIndex++] = ch;
        }

        /*
         * Canonical decomposition based on mappings in decompTable
         */
        for (int i = 0; i < inputCodePointsIndex; i++) {
            ch = inputCodePoints[i];

            decomp = Lexer.getDecomposition(ch);
            if (decomp == null) {
                resCodePoints[resCodePointsIndex++] = ch;
            } else {
                int curSymbDecompLength = decomp.length;

                for (int j = 0; j < curSymbDecompLength; j++) {
                    resCodePoints[resCodePointsIndex++] = decomp[j];
                }
            }
        }

        /*
         * Canonical ordering.
         * See http://www.unicode.org/reports/tr15/#Decomposition for
         * details
         */
        resCodePoints = Lexer.getCanonicalOrder(resCodePoints,
                resCodePointsIndex);

        /*
         * Decomposition for Hangul syllables.
         * See http://www.unicode.org/reports/tr15/#Hangul for
         * details
         */
        decompHangul = new int[resCodePoints.length];

        for (int i = 0; i < resCodePointsIndex; i++) {
            int curSymb = resCodePoints[i];

            decomp = getHangulDecomposition(curSymb);
            if (decomp == null) {
                decompHangul[decompHangulIndex++] = curSymb;
            } else {

                /*
                 * Note that Hangul decompositions have length that is
                 * equal 2 or 3.
                 */
                decompHangul[decompHangulIndex++] = decomp[0];
                decompHangul[decompHangulIndex++] = decomp[1];
                if (decomp.length == 3) {
                    decompHangul[decompHangulIndex++] = decomp[2];
                }
            }
        }

        /*
         * Translating into UTF-16 encoding
         */
        for (int i = 0; i < decompHangulIndex; i++) {
            result.append(Character.toChars(decompHangul[i]));
        }

        return result.toString();
    }

    /**
     * Rearrange codepoints according
     * to canonical order.
     *
     * @param inputInts - array that contains Unicode codepoints
     * @param length - index of last Unicode codepoint plus 1
     *
     * @return array that contains rearranged codepoints.
     */
    static int[] getCanonicalOrder(int[] inputInts, int length) {
        int inputLength = (length < inputInts.length) ? length
                : inputInts.length;

        /*
         * Simple bubble-sort algorithm.
         * Note that many codepoints have 0
         * canonical class, so this algorithm works
         * almost lineary in overwhelming majority
         * of cases. This is due to specific of Unicode
         * combining classes and codepoints.
         */
        for (int i = 1; i < inputLength; i++) {
            int j = i - 1;
            int iCanonicalClass = getCanonicalClass(inputInts[i]);
            int ch;

            // codepoints with combining class 0 are already in place
            if (iCanonicalClass == 0) {
                continue;
            }

            while (j > -1) {
                if (getCanonicalClass(inputInts[j]) > iCanonicalClass) {
                    j = j - 1;
                } else {
                    break;
                }
            }

            ch = inputInts[i];
            for (int k = i; k > j + 1; k--) {
                inputInts[k] = inputInts[k - 1];
            }
            inputInts[j + 1] = ch;
        }

        return inputInts;
    }

    /**
     * Reread current character, may be require if previous token changes mode
     * to one with different character interpretation.
     *
     */
    private void reread() {
        lookAhead = ch;
        lookAheadST = curST;
        index = lookAheadToc;
        lookAheadToc = curToc;
        movePointer();
    }

    /**
     * Moves pointer one position right; save current character to lookBack;
     * lookAhead to current one and finally read one more to lookAhead;
     */
    private void movePointer() {
        // swap pointers
        lookBack = ch;
        ch = lookAhead;
        curST = lookAheadST;
        curToc = lookAheadToc;
        lookAheadToc = index;
        boolean reread;
        do {
            reread = false;
            // read next character analyze it and construct token:
            //
            lookAhead = (index < pattern.length) ? nextCodePoint() : 0;
            lookAheadST = null;

            if (mode == Lexer.MODE_ESCAPE) {
                // inside \Q...\E everything but the terminating \E is literal
                if (lookAhead == '\\') {

                    // need not care about supplementary codepoints here
                    lookAhead = (index < pattern.length) ? pattern[nextIndex()]
                            : 0;

                    switch (lookAhead) {
                    case 'E': {
                        mode = saved_mode;

                        lookAhead = (index <= pattern.length - 2) ? nextCodePoint()
                                : 0;
                        break;
                    }

                    default: {
                        lookAhead = '\\';
                        index = prevNW;
                        return;
                    }
                    }
                } else {
                    return;
                }
            }

            if (lookAhead == '\\') {
                lookAhead = (index < pattern.length - 2) ? nextCodePoint()
                        : -1;
                switch (lookAhead) {
                case -1:
                    throw new PatternSyntaxException(
                            Messages.getString("regex.10"), this.toString(), index); //$NON-NLS-1$
                case 'P':
                case 'p': {
                    String cs = parseCharClassName();
                    boolean negative = false;

                    if (lookAhead == 'P')
                        negative = true;
                    ;
                    try {
                        lookAheadST = AbstractCharClass.getPredefinedClass(cs,
                                negative);
                    } catch (MissingResourceException mre) {
                        throw new PatternSyntaxException(
                                Messages.getString("regex.11" //$NON-NLS-1$
                                        , cs), this.toString(), index);
                    }
                    lookAhead = 0;
                    break;
                }

                case 'w':
                case 's':
                case 'd':
                case 'W':
                case 'S':
                case 'D': {
                    lookAheadST = CharClass.getPredefinedClass(new String(
                            pattern, prevNW, 1), false);
                    lookAhead = 0;
                    break;
                }

                case 'Q': {
                    // enter literal (\Q) mode and rescan
                    saved_mode = mode;
                    mode = Lexer.MODE_ESCAPE;
                    reread = true;
                    break;
                }

                case 't':
                    lookAhead = '\t';
                    break;
                case 'n':
                    lookAhead = '\n';
                    break;
                case 'r':
                    lookAhead = '\r';
                    break;
                case 'f':
                    lookAhead = '\f';
                    break;
                case 'a':
                    lookAhead = '\u0007';
                    break;
                case 'e':
                    lookAhead = '\u001B';
                    break;

                case '1':
                case '2':
                case '3':
                case '4':
                case '5':
                case '6':
                case '7':
                case '8':
                case '9': {
                    // backreference token in pattern mode
                    if (mode == Lexer.MODE_PATTERN) {
                        lookAhead = 0x80000000 | lookAhead;
                    }
                    break;
                }

                case '0':
                    lookAhead = readOctals();
                    break;
                case 'x':
                    lookAhead = readHex("hexadecimal", 2); //$NON-NLS-1$
                    break;
                case 'u':
                    lookAhead = readHex("Unicode", 4); //$NON-NLS-1$
                    break;

                case 'b':
                    lookAhead = CHAR_WORD_BOUND;
                    break;
                case 'B':
                    lookAhead = CHAR_NONWORD_BOUND;
                    break;
                case 'A':
                    lookAhead = CHAR_START_OF_INPUT;
                    break;
                case 'G':
                    lookAhead = CHAR_PREVIOUS_MATCH;
                    break;
                case 'Z':
                    lookAhead = CHAR_END_OF_LINE;
                    break;
                case 'z':
                    lookAhead = CHAR_END_OF_INPUT;
                    break;
                case 'c': {
                    if (index < pattern.length - 2) {

                        // need not care about supplementary codepoints here
                        lookAhead = (pattern[nextIndex()] & 0x1f);
                        break;
                    } else {
                        throw new PatternSyntaxException(
                                Messages.getString("regex.12") //$NON-NLS-1$
                                , this.toString(), index);
                    }
                }

                // letters with no defined escape meaning are rejected
                case 'C':
                case 'E':
                case 'F':
                case 'H':
                case 'I':
                case 'J':
                case 'K':
                case 'L':
                case 'M':
                case 'N':
                case 'O':
                case 'R':
                case 'T':
                case 'U':
                case 'V':
                case 'X':
                case 'Y':
                case 'g':
                case 'h':
                case 'i':
                case 'j':
                case 'k':
                case 'l':
                case 'm':
                case 'o':
                case 'q':
                case 'y':
                    throw new PatternSyntaxException(
                            Messages.getString("regex.13") //$NON-NLS-1$
                            , this.toString(), index);

                default:
                    break;
                }
            } else if (mode == Lexer.MODE_PATTERN) {
                switch (lookAhead) {
                case '+':
                case '*':
                case '?': {
                    // peek one char to pick greedy/reluctant/possessive
                    char mod = (index < pattern.length) ? pattern[index] : '*';
                    switch (mod) {
                    case '+': {
                        lookAhead = lookAhead | Lexer.QMOD_POSSESSIVE;
                        nextIndex();
                        break;
                    }
                    case '?': {
                        lookAhead = lookAhead | Lexer.QMOD_RELUCTANT;
                        nextIndex();
                        break;
                    }
                    default: {
                        lookAhead = lookAhead | Lexer.QMOD_GREEDY;
                        break;
                    }
                    }
                    break;
                }

                case '{': {
                    lookAheadST = processQuantifier(lookAhead);
                    break;
                }

                case '$':
                    lookAhead = CHAR_DOLLAR;
                    break;
                case '(': {
                    if (pattern[index] == '?') {
                        nextIndex();
                        char nonCap = pattern[index];
                        boolean behind = false;
                        do {
                            if (!behind) {
                                switch (nonCap) {
                                case '!':
                                    lookAhead = CHAR_NEG_LOOKAHEAD;
                                    nextIndex();
                                    break;
                                case '=':
                                    lookAhead = CHAR_POS_LOOKAHEAD;
                                    nextIndex();
                                    break;
                                case '>':
                                    lookAhead = CHAR_ATOMIC_GROUP;
                                    nextIndex();
                                    break;
                                case '<': {
                                    nextIndex();
                                    nonCap = pattern[index];
                                    behind = true;
                                    break;
                                }
                                default: {
                                    lookAhead = readFlags();

                                    /*
                                     * We return res = res | 1 << 8
                                     * from readFlags() if we read
                                     * (?idmsux-idmsux)
                                     */
                                    if (lookAhead >= 256) {

                                        //Erase auxiliary bit
                                        lookAhead = (lookAhead & 0xff);
                                        flags = lookAhead;
                                        lookAhead = lookAhead << 16;
                                        lookAhead = CHAR_FLAGS | lookAhead;
                                    } else {
                                        flags = lookAhead;
                                        lookAhead = lookAhead << 16;
                                        lookAhead = CHAR_NONCAP_GROUP
                                                | lookAhead;
                                    }
                                    break;
                                }
                                }
                            } else {
                                behind = false;
                                switch (nonCap) {
                                case '!':
                                    lookAhead = CHAR_NEG_LOOKBEHIND;
                                    nextIndex();
                                    break;
                                case '=':
                                    lookAhead = CHAR_POS_LOOKBEHIND;
                                    nextIndex();
                                    break;
                                default:
                                    throw new PatternSyntaxException(
                                            Messages.getString("regex.14") //$NON-NLS-1$
                                            , this.toString(), index);
                                }
                            }
                        } while (behind);
                    } else {
                        lookAhead = CHAR_LEFT_PARENTHESIS;
                    }
                    break;
                }

                case ')':
                    lookAhead = CHAR_RIGHT_PARENTHESIS;
                    break;
                case '[': {
                    lookAhead = CHAR_LEFT_SQUARE_BRACKET;
                    setMode(Lexer.MODE_RANGE);
                    break;
                }
                case ']': {
                    if (mode == Lexer.MODE_RANGE) {
                        lookAhead = CHAR_RIGHT_SQUARE_BRACKET;
                    }
                    break;
                }
                case '^':
                    lookAhead = CHAR_CARET;
                    break;
                case '|':
                    lookAhead = CHAR_VERTICAL_BAR;
                    break;
                case '.':
                    lookAhead = CHAR_DOT;
                    break;
                default:
                    break;
                }
            } else if (mode == Lexer.MODE_RANGE) {
                switch (lookAhead) {
                case '[':
                    lookAhead = CHAR_LEFT_SQUARE_BRACKET;
                    break;
                case ']':
                    lookAhead = CHAR_RIGHT_SQUARE_BRACKET;
                    break;
                case '^':
                    lookAhead = CHAR_CARET;
                    break;
                case '&':
                    lookAhead = CHAR_AMPERSAND;
                    break;
                case '-':
                    lookAhead = CHAR_HYPHEN;
                    break;
                default:
                    break;
                }
            }
        } while (reread);
    }

    /**
     * Parse character classes names and verifies correction of the syntax;
     */
    private String parseCharClassName() {
        StringBuffer sb = new StringBuffer(10);
        if (index < pattern.length - 2) {
            // one symbol family
            if (pattern[index] != '{') {
                return "Is" + new String(pattern, nextIndex(), 1); //$NON-NLS-1$
            }

            nextIndex();
            char ch = 0;
            while (index < pattern.length - 2
                    && (ch = pattern[nextIndex()]) != '}') {
                sb.append((char) ch);
            }
            if (ch != '}')
                throw new PatternSyntaxException(
                        Messages.getString("regex.15"), this //$NON-NLS-1$
                                .toString(), index);
        }

        if (sb.length() == 0)
            throw new PatternSyntaxException(
                    Messages.getString("regex.16"), this.toString(), //$NON-NLS-1$
                    index);

        String res = sb.toString();
        if (res.length() == 1)
            return "Is" + res; //$NON-NLS-1$
        return (res.length() > 3 && (res.startsWith("Is") || res //$NON-NLS-1$
                .startsWith("In"))) ? res.substring(2) : res; //$NON-NLS-1$
    }

    /**
     * Process given character in assumption that it's quantifier.
     */
    private Quantifier processQuantifier(int ch) {
        StringBuffer sb = new StringBuffer(4);
        int min = -1;
        int max = Integer.MAX_VALUE;
        while (index < pattern.length && (ch = pattern[nextIndex()]) != '}') {
            if (ch == ',' && min < 0) {
                try {
                    min = Integer.parseInt(sb.toString(), 10);
                    sb.delete(0, sb.length());
                } catch (NumberFormatException nfe) {
                    throw new PatternSyntaxException(
                            Messages.getString("regex.17"), this //$NON-NLS-1$
                                    .toString(), index);
                }
            } else {
                sb.append((char) ch);
            }
        }
        if (ch != '}') {
            throw new PatternSyntaxException(
                    Messages.getString("regex.17"), //$NON-NLS-1$
                    this.toString(), index);
        }
        if (sb.length() > 0) {
            try {
                max = Integer.parseInt(sb.toString(), 10);
                if (min < 0)
                    min = max;
            } catch (NumberFormatException nfe) {
                throw new PatternSyntaxException(
                        Messages.getString("regex.17"), this //$NON-NLS-1$
                                .toString(), index);
            }
        } else if (min < 0) {
            throw new PatternSyntaxException(
                    Messages.getString("regex.17"), //$NON-NLS-1$
                    this.toString(), index);
        }
        // rejects negative bounds and min > max in one check
        if ((min | max | max - min) < 0) {
            throw new PatternSyntaxException(
                    Messages.getString("regex.17"), //$NON-NLS-1$
                    this.toString(), index);
        }

        char mod = (index < pattern.length) ? pattern[index] : '*';

        switch (mod) {
        case '+':
            lookAhead = Lexer.QUANT_COMP_P;
            nextIndex();
            break;
        case '?':
            lookAhead = Lexer.QUANT_COMP_R;
            nextIndex();
            break;
        default:
            lookAhead = Lexer.QUANT_COMP;
            break;
        }
        return new Quantifier(min, max);
    }

    /** Returns the original (pre-normalization) pattern string. */
    public String toString() {
        return orig;
    }

    /**
     * Checks if there are any characters in the pattern.
     *
     * @return true if there are no more characters in the pattern.
     */
    public boolean isEmpty() {
        return ch == 0 && lookAhead == 0 && index == patternFullLength
                && !isSpecial();
    }

    /**
     * Returns true if current character is plain token.
     */
    public static boolean isLetter(int ch) {

        //all supplementary codepoints have integer value that is >= 0;
        return ch >= 0;
    }

    /**
     * Return true if current character is letter, false otherwise; This is
     * shortcut to static method isLetter to check the current character.
     *
     * @return true if current character is letter, false otherwise
     */
    public boolean isLetter() {
        return !isEmpty() && !isSpecial() && isLetter(ch);
    }

    /*
     * Note that Character class methods
     * isHighSurrogate(), isLowSurrogate()
     * take char parameter while we need an int
     * parameter without truncation to char value
     */
    public boolean isHighSurrogate() {
        return (ch <= 0xDBFF) && (ch >= 0xD800);
    }

    public boolean isLowSurrogate() {
        return (ch <= 0xDFFF) && (ch >= 0xDC00);
    }

    public static boolean isHighSurrogate(int ch) {
        return (ch <= 0xDBFF) && (ch >= 0xD800);
    }

    public static boolean isLowSurrogate(int ch) {
        return (ch <= 0xDFFF) && (ch >= 0xDC00);
    }

    /**
     * Process hexadecimal integer.
     */
    private int readHex(String radixName, int max) {
        StringBuffer st = new StringBuffer(max);
        int length = pattern.length - 2;
        int i;
        for (i = 0; i < max && index < length; i++) {
            st.append((char) pattern[nextIndex()]);
        }
        if (i == max) {
            try {
                return Integer.parseInt(st.toString(), 16);
            } catch (NumberFormatException nfe) {
            }
        }

        throw new PatternSyntaxException(
                Messages.getString("regex.18", radixName) //$NON-NLS-1$
                , this.toString(), index);
    }

    /**
     * Process octal integer.
     */
    private int readOctals() {
        char ch;
        int max = 3;
        int i = 1;
        int first;
        int res;
        int length = pattern.length - 2;

        switch (first = Character.digit((ch = pattern[index]), 8)) {
        case -1:
            throw new PatternSyntaxException(
                    Messages.getString("regex.19") //$NON-NLS-1$
                    , this.toString(), index);
        default: {
            // first digit > 3 limits the value to two further digits (<= 0377)
            if (first > 3)
                max--;
            nextIndex();
            res = first;
        }
        }

        while (i < max && index < length
                && (first = Character.digit((ch = pattern[index]), 8)) >= 0) {
            res = res * 8 + first;
            nextIndex();
            i++;
        }

        return res;
    }

    /**
     * Process expression flags given with (?idmsux-idmsux)
     */
    private int readFlags() {
        char ch;
        boolean pos = true;
        int res = flags;

        while (index < pattern.length) {
            ch = pattern[index];
            switch (ch) {
            case '-':
                if (!pos) {
                    throw new PatternSyntaxException(
                            Messages.getString("regex.1A") //$NON-NLS-1$
                            , this.toString(), index);
                }
                pos = false;
                break;

            case 'i':
                res = pos ? res | Pattern.CASE_INSENSITIVE
                        : (res ^ Pattern.CASE_INSENSITIVE) & res;
                break;

            case 'd':
                res = pos ? res | Pattern.UNIX_LINES
                        : (res ^ Pattern.UNIX_LINES) & res;
                break;

            case 'm':
                res = pos ? res | Pattern.MULTILINE
                        : (res ^ Pattern.MULTILINE) & res;
                break;

            case 's':
                res = pos ? res | Pattern.DOTALL
                        : (res ^ Pattern.DOTALL) & res;
                break;

            case 'u':
                res = pos ? res | Pattern.UNICODE_CASE
                        : (res ^ Pattern.UNICODE_CASE) & res;
                break;

            case 'x':
                res = pos ? res | Pattern.COMMENTS
                        : (res ^ Pattern.COMMENTS) & res;
                break;

            case ':':
                nextIndex();
                return res;

            case ')':
                nextIndex();
                return res | (1 << 8);

            default:
                // ignore invalid flags (HARMONY-2127)
            }
            nextIndex();
        }

        throw new PatternSyntaxException(
                Messages.getString("regex.1A"), //$NON-NLS-1$
                this.toString(), index);
    }

    /**
     * Returns next character index to read and moves pointer to the next one.
     * If comments flag is on this method will skip comments and whitespaces.
     *
     * The following actions are equivalent if comments flag is off ch =
     * pattern[index++] == ch = pattern[nextIndex]
     *
     * @return next character index to read.
     */
    private int nextIndex() {
        prevNW = index;
        if ((flags & Pattern.COMMENTS) != 0) {
            skipComments();
        } else {
            index++;
        }
        return prevNW;
    }

    /**
     * Skips comments and whitespaces
     */
    private int skipComments() {
        int length = pattern.length - 2;
        index++;
        do {
            while (index < length && Character.isWhitespace(pattern[index]))
                index++;
            if (index < length && pattern[index] == '#') {
                index++;
                while (index < length && !isLineSeparator(pattern[index]))
                    index++;
            } else
                return index;
        } while (true);
    }

    /** True for line separators recognized in COMMENTS mode
        ((ch | 1) == '\u2029' matches both U+2028 and U+2029). */
    private boolean isLineSeparator(int ch) {
        return (ch == '\n' || ch == '\r' || ch == '\u0085' || (ch | 1) == '\u2029');
    }

    /**
     * Gets decomposition for given codepoint from
     * decomposition mappings table.
     *
     * @param ch - Unicode codepoint
     * @return array of codepoints that is a canonical
     * decomposition of ch.
     */
    static int[] getDecomposition(int ch) {
        return decompTable.get(ch);
    }

    /**
     * Gets decomposition for given Hangul syllable.
     * This is an implementation of Hangul decomposition algorithm
     * according to http://www.unicode.org/versions/Unicode4.0.0/ch03.pdf
     * "3.12 Conjoining Jamo Behavior".
     *
     * @param ch - given Hangul syllable
     * @return canonical decomposition of ch.
     */
    static int[] getHangulDecomposition(int ch) {
        int SIndex = ch - SBase;

        if (SIndex < 0 || SIndex >= SCount) {
            return null;
        } else {
            int L = LBase + SIndex / NCount;
            int V = VBase + (SIndex % NCount) / TCount;
            int T = SIndex % TCount;
            int decomp[];

            if (T == 0) {
                decomp = new int[] { L, V };
            } else {
                T = TBase + T;
                decomp = new int[] { L, V, T };
            }
            return decomp;
        }
    }

    /**
     * Gets canonical class for given codepoint from
     * decomposition mappings table.
     *
     * @param - ch Unicode codepoint
     * @return canonical class for given Unicode codepoint
     * that is represented by ch.
     */
    static int getCanonicalClass(int ch) {
        int canClass = canonClassesTable.get(ch);

        // table returns its own size as a "not found" sentinel
        return (canClass == canonClassesTableSize) ? 0 : canClass;
    }

    /**
     * Tests if given codepoint is a canonical decomposition of another
     * codepoint.
     *
     * @param ch - codepoint to test
     * @return true if ch is a decomposition.
     */
    static boolean hasSingleCodepointDecomposition(int ch) {
        int hasSingleDecomp = singleDecompTable.get(ch);

        /*
         * singleDecompTable doesn't contain ch
         * == (hasSingleDecomp == singleDecompTableSize)
         */
        return (hasSingleDecomp == singleDecompTableSize) ? false : true;
    }

    /**
     * Tests if given codepoint has canonical decomposition
     * and given codepoint's canonical class is not 0.
     *
     * @param ch - codepoint to test
     * @return true if canonical class is not 0 and ch has a decomposition.
     */
    static boolean hasDecompositionNonNullCanClass(int ch) {
        // non-short-circuit | is equivalent to || for these boolean tests
        return ch == 0x0340 | ch == 0x0341 | ch == 0x0343 | ch == 0x0344;
    }

    /** Reads one codepoint, combining a surrogate pair (possibly separated
        by skipped whitespace in COMMENTS mode) into a single value. */
    private int nextCodePoint() {
        char high = pattern[nextIndex()];

        if (Character.isHighSurrogate(high)) {

            //low and high char may be delimited by spaces
            int lowExpectedIndex = prevNW + 1;

            if (lowExpectedIndex < pattern.length) {
                char low = pattern[lowExpectedIndex];
                if (Character.isLowSurrogate(low)) {
                    nextIndex();
                    return Character.toCodePoint(high, low);
                }
            }
        }

        return (int) high;
    }

    /**
     * Tests Unicode codepoint if it is a boundary
     * of decomposed Unicode codepoint.
     *
     * @param ch - Unicode codepoint to test
     * @return true if given codepoint is a boundary.
     */
    static boolean isDecomposedCharBoundary(int ch) {
        int canClass = canonClassesTable.get(ch);

        //Lexer.getCanonicalClass(ch) == 0
        boolean isBoundary = (canClass == canonClassesTableSize);

        return isBoundary;
    }

    /**
     * Returns the curr. character index.
     */
    public int getIndex() {
        return curToc;
    }
}
apache-2.0
darranl/directory-shared
ldap/model/src/main/java/org/apache/directory/api/ldap/model/message/SearchParams.java
9380
/*
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 *
 */
package org.apache.directory.api.ldap.model.message;


import java.util.HashSet;
import java.util.Set;

import javax.naming.directory.SearchControls;

import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.AttributeTypeOptions;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.model.schema.SchemaUtils;
import org.apache.directory.api.util.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


/**
 * A container for Search parameters. It replaces the SearchControls.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class SearchParams
{
    /** The LoggerFactory used by this class */
    private static final Logger LOG = LoggerFactory.getLogger( SearchParams.class );

    /** The search scope. Default to OBJECT */
    private SearchScope scope = SearchScope.OBJECT;

    /** The time limit. Default to 0 (infinite) */
    private int timeLimit = 0;

    /** The size limit. Default to 0 (infinite) */
    private long sizeLimit = 0;

    /** If we should return only types. Default to false */
    private boolean typesOnly = false;

    /** The aliasDerefMode. Default to DEREF_ALWAYS */
    private AliasDerefMode aliasDerefMode = AliasDerefMode.DEREF_ALWAYS;

    /** The list of attributes to return, as Strings. Default to an empty set */
    private Set<String> returningAttributesStr;

    /** The list of attributes to return, once it has been normalized. Default to an empty set */
    private Set<AttributeTypeOptions> returningAttributes;

    /** The set of controls for this search. Default to an empty set */
    private Set<Control> controls;


    /**
     * Creates a new instance of SearchContext, with all the values set to
     * default.
     */
    public SearchParams()
    {
        returningAttributes = new HashSet<AttributeTypeOptions>();
        returningAttributesStr = new HashSet<String>();
        controls = new HashSet<Control>();
    }


    /**
     * @return the scope
     */
    public SearchScope getScope()
    {
        return scope;
    }


    /**
     * @param scope the scope to set
     */
    public void setScope( SearchScope scope )
    {
        this.scope = scope;
    }


    /**
     * @return the timeLimit
     */
    public int getTimeLimit()
    {
        return timeLimit;
    }


    /**
     * @param timeLimit the timeLimit to set
     */
    public void setTimeLimit( int timeLimit )
    {
        this.timeLimit = timeLimit;
    }


    /**
     * @return the sizeLimit
     */
    public long getSizeLimit()
    {
        return sizeLimit;
    }


    /**
     * @param sizeLimit the sizeLimit to set
     */
    public void setSizeLimit( long sizeLimit )
    {
        this.sizeLimit = sizeLimit;
    }


    /**
     * @return the typesOnly
     */
    public boolean isTypesOnly()
    {
        return typesOnly;
    }


    /**
     * @param typesOnly the typesOnly to set
     */
    public void setTypesOnly( boolean typesOnly )
    {
        this.typesOnly = typesOnly;
    }


    /**
     * @return the aliasDerefMode
     */
    public AliasDerefMode getAliasDerefMode()
    {
        return aliasDerefMode;
    }


    /**
     * @param aliasDerefMode the aliasDerefMode to set
     */
    public void setAliasDerefMode( AliasDerefMode aliasDerefMode )
    {
        this.aliasDerefMode = aliasDerefMode;
    }


    /**
     * @return the returningAttributes
     */
    public Set<AttributeTypeOptions> getReturningAttributes()
    {
        return returningAttributes;
    }


    /**
     * @return the returningAttributes
     */
    public Set<String> getReturningAttributesStr()
    {
        return returningAttributesStr;
    }


    /**
     * Normalize the ReturningAttributes. It reads all the String from the returningAttributesString,
     * and grab the associated AttributeType from the schema to store it into the returningAttributes
     * Set.
     *
     * @param schemaManager The schema manager
     */
    public void normalize( SchemaManager schemaManager )
    {
        for ( String returnAttribute : returningAttributesStr )
        {
            try
            {
                String id = SchemaUtils.stripOptions( returnAttribute );
                Set<String> options = SchemaUtils.getOptions( returnAttribute );

                AttributeType attributeType = schemaManager.lookupAttributeTypeRegistry( id );
                AttributeTypeOptions attrOptions = new AttributeTypeOptions( attributeType, options );

                returningAttributes.add( attrOptions );
            }
            catch ( LdapException ne )
            {
                LOG.warn( "Requested attribute {} does not exist in the schema, it will be ignored",
                    returnAttribute );
                // Unknown attributes should be silently ignored, as RFC 2251 states
            }
        }
    }


    /**
     * @param returningAttributes the returningAttributes to set
     */
    public void setReturningAttributes( String... returningAttributes )
    {
        if ( returningAttributes != null )
        {
            for ( String returnAttribute : returningAttributes )
            {
                this.returningAttributesStr.add( returnAttribute );
            }
        }
    }


    /**
     * @param returningAttribute the returningAttributes to add
     */
    public void addReturningAttributes( String returningAttribute )
    {
        this.returningAttributesStr.add( returningAttribute );
    }


    /**
     * @return the controls
     */
    public Set<Control> getControls()
    {
        return controls;
    }


    /**
     * @param controls the controls to set
     */
    public void setControls( Set<Control> controls )
    {
        this.controls = controls;
    }


    /**
     * @param control the controls to set
     */
    public void addControl( Control control )
    {
        this.controls.add( control );
    }


    /**
     * Creates a {@link SearchParams} from JNDI search controls.
     *
     * @param searchControls the search controls
     * @param aliasDerefMode the alias deref mode
     * @return the search params
     */
    public static SearchParams toSearchParams( SearchControls searchControls, AliasDerefMode aliasDerefMode )
    {
        SearchParams searchParams = new SearchParams();

        searchParams.setAliasDerefMode( aliasDerefMode );
        searchParams.setTimeLimit( searchControls.getTimeLimit() );
        searchParams.setSizeLimit( searchControls.getCountLimit() );
        searchParams.setScope( SearchScope.getSearchScope( searchControls.getSearchScope() ) );
        // NOTE(review): maps JNDI's returning-object flag onto typesOnly —
        // confirm this inversion-free mapping is the intended semantics
        searchParams.setTypesOnly( searchControls.getReturningObjFlag() );

        if ( searchControls.getReturningAttributes() != null )
        {
            for ( String returningAttribute : searchControls.getReturningAttributes() )
            {
                searchParams.addReturningAttributes( returningAttribute );
            }
        }

        return searchParams;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();

        sb.append( "Search parameters :\n" );
        sb.append( "    scope : " ).append( scope ).append( "\n" );
        sb.append( "    Alias dereferencing : " ).append( aliasDerefMode ).append( "\n" );
        sb.append( "    types only : " ).append( typesOnly ).append( "\n" );

        if ( !returningAttributesStr.isEmpty() )
        {
            sb.append( "    returning attributes : " ).append( Strings.setToString( returningAttributesStr ) )
                .append( "\n" );
        }

        if ( timeLimit > 0 )
        {
            sb.append( "    timeLimit : " ).append( timeLimit ).append( "\n" );
        }
        else
        {
            sb.append( "    no timeLimit\n" );
        }

        // Fixed copy-paste bug: this branch was guarded by 'timeLimit > 0',
        // so the configured sizeLimit was printed (or suppressed) based on
        // the time limit instead of the size limit.
        if ( sizeLimit > 0 )
        {
            sb.append( "    sizeLimit : " ).append( sizeLimit ).append( "\n" );
        }
        else
        {
            sb.append( "    no sizeLimit\n" );
        }

        if ( !controls.isEmpty() )
        {
            for ( Control control : controls )
            {
                sb.append( "    control : " ).
                    append( control.getOid() ).append( "/" ).
                    append( control.getClass().getName() ).append( "\n" );
            }
        }

        return sb.toString();
    }
}
apache-2.0
yuangang123/Eachother
app/src/main/java/info/LiuyanArrayAdapter.java
2431
//package info; // //import android.support.v7.widget.RecyclerView; //import android.view.LayoutInflater; //import android.view.View; //import android.view.ViewGroup; //import android.widget.TextView; //import android.widget.Toast; // //import com.example.eachother.R; // //import java.util.ArrayList; //import java.util.List; // ///** // * Created by 袁刚 on 2017/5/15. // */ // //public class LiuyanArrayAdapter extends RecyclerView.Adapter<LiuyanArrayAdapter.ViewHoder> { // // public List<Liuyan> liuyanList = new ArrayList<>(); // // static class ViewHoder extends RecyclerView.ViewHolder{ // View liuyan; // TextView liuyanuser; // TextView liuyancontent; // // public ViewHoder(View itemView) { // super(itemView); // this.liuyan= itemView; // this.liuyanuser = (TextView) itemView.findViewById(R.id.liuyan_user_item); // this.liuyancontent=(TextView)itemView.findViewById(R.id.liuyan_content_item); // } // } // // public LiuyanArrayAdapter(List<Liuyan> liuyanList) { // this.liuyanList = liuyanList; // } // // @Override // public ViewHoder onCreateViewHolder(final ViewGroup parent, int viewType) { //// return null;return // View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.liuyan_item,parent,false); // ViewHoder viewHoder = new ViewHoder(view); // // /** // * 为留言中添加点击功能 // */ // // viewHoder.liuyan.setOnClickListener(new View.OnClickListener() { // @Override // public void onClick(View view) { // // } // }); // // viewHoder.liuyanuser.setOnClickListener(new View.OnClickListener() { // @Override // public void onClick(View view) { // Toast.makeText(parent.getContext(),"将会跳入用户详细界面,不过暂时没有开通。",Toast.LENGTH_SHORT).show(); // } // }); // // return viewHoder; // } // // @Override // public void onBindViewHolder(ViewHoder holder, int position) { // Liuyan liuyan= liuyanList.get(position); // holder.liuyanuser.setText(liuyan.getLiuyaner()); // holder.liuyancontent.setText(liuyan.getLiuyancontent()); // // } // // @Override // public int getItemCount() { //// 
return 0; // return liuyanList.size(); // } //}
apache-2.0
kebernet/skillz
api/src/main/java/net/kebernet/skillz/annotation/Launched.java
1097
/* * Copyright (c) 2016 Robert Cooper * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.kebernet.skillz.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * An annotation for a method called when a skill is launched by the user for * continuous interactive mode. */ @Retention(RetentionPolicy.RUNTIME) @Inherited @Target(ElementType.METHOD) public @interface Launched { }
apache-2.0
millecker/senti-storm
src/at/illecker/sentistorm/spout/TwitterStreamSpout.java
5377
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package at.illecker.sentistorm.spout;

import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;

import twitter4j.FilterQuery;
import twitter4j.StallWarning;
import twitter4j.Status;
import twitter4j.StatusDeletionNotice;
import twitter4j.StatusListener;
import twitter4j.TwitterStream;
import twitter4j.TwitterStreamFactory;
import twitter4j.auth.AccessToken;
import twitter4j.conf.ConfigurationBuilder;
import at.illecker.sentistorm.commons.util.TimeUtils;
import backtype.storm.Config;
import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

/**
 * Storm spout that connects to the Twitter streaming API, buffers incoming
 * statuses in a bounded queue, and emits one tuple per tweet with fields
 * (id, text, score). The score field is emitted as {@code null} and is
 * expected to be filled in by downstream bolts.
 */
public class TwitterStreamSpout extends BaseRichSpout {
  public static final String ID = "twitter-stream-spout";
  public static final String CONF_STARTUP_SLEEP_MS = ID + ".startup.sleep.ms";
  private static final long serialVersionUID = -4657730220755697034L;
  private SpoutOutputCollector m_collector;
  // Bounded buffer between the twitter4j listener thread and nextTuple();
  // offer() drops tweets when full rather than blocking the listener.
  private LinkedBlockingQueue<Status> m_tweetsQueue = null;
  private TwitterStream m_twitterStream;
  private String m_consumerKey;
  private String m_consumerSecret;
  private String m_accessToken;
  private String m_accessTokenSecret;
  private String[] m_keyWords;
  private String m_filterLanguage;

  /**
   * @param consumerKey OAuth consumer key
   * @param consumerSecret OAuth consumer secret
   * @param accessToken OAuth access token
   * @param accessTokenSecret OAuth access token secret
   * @param keyWords optional track keywords; may be null
   * @param filterLanguage stream language filter, e.g. "en"
   */
  public TwitterStreamSpout(String consumerKey, String consumerSecret,
      String accessToken, String accessTokenSecret, String[] keyWords,
      String filterLanguage) {
    this.m_consumerKey = consumerKey;
    this.m_consumerSecret = consumerSecret;
    this.m_accessToken = accessToken;
    this.m_accessTokenSecret = accessTokenSecret;
    this.m_keyWords = keyWords;
    this.m_filterLanguage = filterLanguage; // "en"
  }

  @Override
  public void declareOutputFields(OutputFieldsDeclarer declarer) {
    // key of output tuples
    declarer.declare(new Fields("id", "text", "score"));
  }

  @Override
  public void open(Map config, TopologyContext context,
      SpoutOutputCollector collector) {
    m_collector = collector;
    m_tweetsQueue = new LinkedBlockingQueue<Status>(1000);

    // Optional startup sleep to finish bolt preparation
    // before spout starts emitting
    if (config.get(CONF_STARTUP_SLEEP_MS) != null) {
      long startupSleepMillis = (Long) config.get(CONF_STARTUP_SLEEP_MS);
      TimeUtils.sleepMillis(startupSleepMillis);
    }

    // Bug fix: the stream was previously held only in a local variable, so
    // m_twitterStream stayed null and close() threw a NullPointerException.
    m_twitterStream = new TwitterStreamFactory(
        new ConfigurationBuilder().setJSONStoreEnabled(true).build())
        .getInstance();

    // Set Listener
    m_twitterStream.addListener(new StatusListener() {
      @Override
      public void onStatus(Status status) {
        m_tweetsQueue.offer(status); // add tweet into queue
      }

      @Override
      public void onException(Exception arg0) {
      }

      @Override
      public void onDeletionNotice(StatusDeletionNotice arg0) {
      }

      @Override
      public void onScrubGeo(long arg0, long arg1) {
      }

      @Override
      public void onStallWarning(StallWarning arg0) {
      }

      @Override
      public void onTrackLimitationNotice(int arg0) {
      }
    });

    // Set credentials
    m_twitterStream.setOAuthConsumer(m_consumerKey, m_consumerSecret);
    AccessToken token = new AccessToken(m_accessToken, m_accessTokenSecret);
    m_twitterStream.setOAuthAccessToken(token);

    // Filter twitter stream
    FilterQuery tweetFilterQuery = new FilterQuery();
    if (m_keyWords != null) {
      tweetFilterQuery.track(m_keyWords);
    }

    // Filter location
    // https://dev.twitter.com/docs/streaming-apis/parameters#locations
    // NOTE(review): track and locations are OR-ed by the streaming API, so this
    // matches any geotagged tweet in addition to keyword matches — confirm intended.
    tweetFilterQuery.locations(new double[][] { new double[] { -180, -90, },
        new double[] { 180, 90 } }); // any geotagged tweet

    // Filter language
    tweetFilterQuery.language(new String[] { m_filterLanguage });

    m_twitterStream.filter(tweetFilterQuery);
  }

  @Override
  public void nextTuple() {
    Status tweet = m_tweetsQueue.poll();
    if (tweet == null) {
      TimeUtils.sleepMillis(50); // sleep 50 ms
    } else {
      // Emit tweet
      m_collector.emit(new Values(tweet.getId(), tweet.getText(), null));
    }
  }

  @Override
  public void close() {
    // Guard against close() before open() (or after a failed open()).
    if (m_twitterStream != null) {
      m_twitterStream.shutdown();
    }
  }

  @Override
  public Map<String, Object> getComponentConfiguration() {
    Config ret = new Config();
    // Single task: the twitter4j connection must not be duplicated.
    ret.setMaxTaskParallelism(1);
    return ret;
  }

  @Override
  public void ack(Object id) {
  }

  @Override
  public void fail(Object id) {
  }
}
apache-2.0
ivanntis/MeMap
MeMap/src/com/gizmoideas/mobile/memap/service/StartServiceReceiver.java
654
package com.gizmoideas.mobile.memap.service; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; public class StartServiceReceiver extends BroadcastReceiver { @Override public void onReceive(Context context, Intent intent) { //PositionReceiver position =new PositionReceiver(); //PositionReceiver position =new PositionReceiver(context); if (intent.getAction().equals("android.intent.action.BOOT_COMPLETED")) { //position.setPosition(context); //position.setPosition(); PositionReceiver position =new PositionReceiver(context); } } }
apache-2.0
alvindaiyan/cassava
src/test/java/com/cassava/core/dao/basetest/ExampleDAO.java
788
package com.cassava.core.dao.basetest;

import com.cassava.core.dao.BaseDAO;

import java.util.UUID;
import java.util.concurrent.ExecutionException;

/**
 * Lazily-initialized singleton DAO for {@link Example} entities, keyed by UUID.
 *
 * Created by yan.dai on 5/11/2015.
 */
public class ExampleDAO extends BaseDAO<UUID, Example> {

  // Bug fix: double-checked locking requires the field to be volatile;
  // without it, another thread may observe a partially constructed instance.
  private static volatile ExampleDAO INSTANCE = null;

  ExampleDAO(Class<Example> type, String keyspace) {
    super(type, keyspace);
  }

  /**
   * Returns the singleton instance, creating it on first call.
   *
   * NOTE(review): the keyspace argument is only honored on the very first
   * call; later calls with a different keyspace silently return the existing
   * instance — confirm this is intended.
   *
   * @param keyspace the Cassandra keyspace used when first constructing the DAO
   * @return the shared ExampleDAO instance
   */
  public static ExampleDAO get(String keyspace) {
    if (INSTANCE == null) {
      synchronized (ExampleDAO.class) {
        if (INSTANCE == null) {
          INSTANCE = new ExampleDAO(Example.class, keyspace);
        }
      }
    }
    return INSTANCE;
  }

  /**
   * Looks up an entity in the inherited cache.
   *
   * @param key the entity's UUID
   * @return the cached Example
   * @throws ExecutionException if the cache loader fails
   */
  public Example getFromCache(UUID key) throws ExecutionException {
    return this.cache.get(key);
  }
}
apache-2.0
NationalSecurityAgency/ghidra
Ghidra/Framework/SoftwareModeling/src/main/java/ghidra/program/model/block/CodeBlockIterator.java
2339
/* ###
 * IP: GHIDRA
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.program.model.block;

import java.util.Iterator;

import ghidra.util.exception.CancelledException;
import ghidra.util.task.TaskMonitor;
import util.CollectionUtils;

/**
 * An iterator interface over CodeBlocks.
 *
 * <P>Note: this iterator is also {@link Iterable}.  The {@link #hasNext()} and {@link #next()}
 * methods of this interface throw a {@link CancelledException} if the monitor is cancelled.  The
 * iterator returned from {@link #iterator()} does <b>not</b> throw a cancelled exception.  If
 * you need to know the cancelled state of this iterator, then you must check the cancelled state
 * of the monitor passed into this iterator via the {@link CodeBlockModel}.  See
 * {@link TaskMonitor#isCancelled()}.
 *
 * @see ghidra.program.model.block.CodeBlock
 * @see CollectionUtils#asIterable
 */
public interface CodeBlockIterator extends Iterable<CodeBlock> {

	/**
	 * Return true if next() will return a CodeBlock.
	 * @return true if next() will return a CodeBlock.
	 * @throws CancelledException thrown if the operation is cancelled.
	 */
	public boolean hasNext() throws CancelledException;

	/**
	 * Return the next CodeBlock.
	 * @return the next CodeBlock.
	 * @throws CancelledException thrown if the operation is cancelled.
	 */
	public CodeBlock next() throws CancelledException;

	// Adapter to java.util.Iterator. As documented on the interface, cancellation
	// is deliberately swallowed here: a cancelled monitor surfaces as an exhausted
	// iterator (hasNext() == false) or a null element (next() == null) rather than
	// an exception, since java.util.Iterator cannot throw CancelledException.
	@Override
	default Iterator<CodeBlock> iterator() {
		return new Iterator<>() {

			@Override
			public boolean hasNext() {
				try {
					return CodeBlockIterator.this.hasNext();
				}
				catch (CancelledException e) {
					// cancelled => report no more elements; check the TaskMonitor
					// if the caller needs to distinguish "done" from "cancelled"
					return false;
				}
			}

			@Override
			public CodeBlock next() {
				try {
					return CodeBlockIterator.this.next();
				}
				catch (CancelledException e) {
					// cancelled mid-iteration => null element by design
					// (NOTE: deviates from the java.util.Iterator contract, which
					// would normally throw NoSuchElementException)
					return null;
				}
			}
		};
	}
}
apache-2.0
tremechus/sqlboa
src/main/java/sqlboa/db/DBConnection.java
507
package sqlboa.db;

import sqlboa.Configuration;
import sqlboa.model.ResultRow;
import sqlboa.model.SqlParam;
import sqlboa.model.StatementResult;

import java.sql.*;
import java.util.ArrayList;
import java.util.List;

/**
 * Abstraction over a single database connection used by sqlboa.
 * Implementations wrap a concrete JDBC (or similar) backend.
 */
public interface DBConnection {

    /**
     * Runs a query and returns each result row rendered as a single string.
     *
     * @param sql the statement to execute
     * @return one string per result row
     * @throws SQLException if the statement fails
     */
    List<String> list(String sql) throws SQLException;

    /**
     * Executes a statement for its side effects, discarding any result.
     *
     * @param sql the statement to execute
     * @throws SQLException if the statement fails
     */
    void exec(String sql) throws SQLException;

    /**
     * Executes a parameterized statement and returns its full result.
     *
     * @param sql the statement to execute
     * @param bindParams values bound to the statement's placeholders
     * @return the statement's result set and metadata
     * @throws SQLException if the statement fails
     */
    StatementResult exec(String sql, List<SqlParam> bindParams) throws SQLException;

    /**
     * @return a human-readable name identifying this connection
     */
    String getName();

    /**
     * @return true if the connection is currently usable
     */
    boolean isOK();
}
apache-2.0
mdaniel/intellij-community
java/debugger/impl/src/com/intellij/debugger/engine/DebugProcessEvents.java
29015
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.debugger.engine; import com.intellij.debugger.*; import com.intellij.debugger.engine.events.DebuggerCommandImpl; import com.intellij.debugger.engine.events.SuspendContextCommandImpl; import com.intellij.debugger.engine.requests.LocatableEventRequestor; import com.intellij.debugger.engine.requests.MethodReturnValueWatcher; import com.intellij.debugger.engine.requests.RequestManagerImpl; import com.intellij.debugger.impl.DebuggerSession; import com.intellij.debugger.impl.DebuggerUtilsAsync; import com.intellij.debugger.impl.DebuggerUtilsImpl; import com.intellij.debugger.impl.PrioritizedTask; import com.intellij.debugger.jdi.ThreadReferenceProxyImpl; import com.intellij.debugger.jdi.VirtualMachineProxyImpl; import com.intellij.debugger.memory.agent.MemoryAgentUtil; import com.intellij.debugger.requests.ClassPrepareRequestor; import com.intellij.debugger.requests.Requestor; import com.intellij.debugger.settings.DebuggerSettings; import com.intellij.debugger.ui.breakpoints.Breakpoint; import com.intellij.debugger.ui.breakpoints.InstrumentationTracker; import com.intellij.debugger.ui.breakpoints.StackCapturingLineBreakpoint; import com.intellij.debugger.ui.overhead.OverheadProducer; import com.intellij.debugger.ui.overhead.OverheadTimings; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.extensions.ExtensionPointListener; import com.intellij.openapi.extensions.PluginDescriptor; import com.intellij.openapi.progress.ProcessCanceledException; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.MessageType; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.Factory; import com.intellij.openapi.util.Pair; import 
com.intellij.openapi.util.registry.Registry; import com.intellij.util.ConcurrencyUtil; import com.intellij.util.Consumer; import com.intellij.util.concurrency.AppExecutorUtil; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.ui.UIUtil; import com.intellij.xdebugger.XDebugSession; import com.intellij.xdebugger.breakpoints.XBreakpoint; import com.intellij.xdebugger.impl.XDebugSessionImpl; import com.intellij.xdebugger.impl.XDebuggerManagerImpl; import com.intellij.xdebugger.impl.ui.XDebugSessionTab; import com.jetbrains.jdi.ThreadReferenceImpl; import com.sun.jdi.*; import com.sun.jdi.event.*; import com.sun.jdi.request.EventRequest; import com.sun.jdi.request.EventRequestManager; import one.util.streamex.StreamEx; import org.jetbrains.annotations.Nls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.util.*; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** * @author lex */ public class DebugProcessEvents extends DebugProcessImpl { private static final Logger LOG = Logger.getInstance(DebugProcessEvents.class); private static final String REQUEST_HANDLER = "REQUEST_HANDLER"; private Map<VirtualMachine, DebuggerEventThread> myEventThreads = new HashMap<>(); public DebugProcessEvents(Project project) { super(project); DebuggerSettings.getInstance().addCapturePointsSettingsListener(this::createStackCapturingBreakpoints, myDisposable); } @Override protected void commitVM(final VirtualMachine vm) { super.commitVM(vm); if (vm != null) { vmAttached(); if (vm.canBeModified()) { Factory<DebuggerEventThread> createEventThread = () -> new DebuggerEventThread(); DebuggerEventThread eventThread = ContainerUtil.getOrCreate(myEventThreads, vm, createEventThread); ApplicationManager.getApplication().executeOnPooledThread( ConcurrencyUtil.underThreadNameRunnable("DebugProcessEvents", eventThread)); } } } private static void 
showStatusText(DebugProcessEvents debugProcess, Event event) { Requestor requestor = RequestManagerImpl.findRequestor(event.request()); Breakpoint breakpoint = null; if(requestor instanceof Breakpoint) { breakpoint = (Breakpoint)requestor; } String text = debugProcess.getEventText(Pair.create(breakpoint, event)); debugProcess.showStatusText(text); } public @Nls String getEventText(Pair<Breakpoint, Event> descriptor) { String text = ""; final Event event = descriptor.getSecond(); final Breakpoint breakpoint = descriptor.getFirst(); if (event instanceof LocatableEvent) { try { text = breakpoint != null ? breakpoint.getEventMessage(((LocatableEvent)event)) : JavaDebuggerBundle .message("status.generic.breakpoint.reached"); } catch (InternalException e) { text = JavaDebuggerBundle.message("status.generic.breakpoint.reached"); } } else if (event instanceof VMStartEvent) { text = JavaDebuggerBundle.message("status.process.started"); } else if (event instanceof VMDeathEvent) { text = JavaDebuggerBundle.message("status.process.terminated"); } else if (event instanceof VMDisconnectEvent) { text = JavaDebuggerBundle.message("status.disconnected", DebuggerUtilsImpl.getConnectionDisplayName(getConnection())); } return text; } private class DebuggerEventThread implements Runnable { private final VirtualMachineProxyImpl myVmProxy; private final DebuggerManagerThreadImpl myDebuggerManagerThread; DebuggerEventThread() { myVmProxy = getVirtualMachineProxy(); myDebuggerManagerThread = getManagerThread(); } private boolean myIsStopped = false; public synchronized void stopListening() { myIsStopped = true; } private synchronized boolean isStopped() { return myIsStopped; } @Override public void run() { try { EventQueue eventQueue = myVmProxy.eventQueue(); while (!isStopped()) { try { final EventSet eventSet = eventQueue.remove(); myDebuggerManagerThread.invokeAndWait(new DebuggerCommandImpl(PrioritizedTask.Priority.HIGH) { @Override protected void action() { int processed = 0; for 
(Event event : eventSet) { if (myReturnValueWatcher != null && myReturnValueWatcher.isTrackingEnabled()) { if (myReturnValueWatcher.processEvent(event)) { processed++; continue; } } Consumer<? super Event> handler = getEventRequestHandler(event); if (handler != null) { handler.consume(event); processed++; } } if (processed == eventSet.size()) { DebuggerUtilsAsync.resume(eventSet); return; } LocatableEvent locatableEvent = getLocatableEvent(eventSet); if (eventSet.suspendPolicy() == EventRequest.SUSPEND_ALL) { // check if there is already one request with policy SUSPEND_ALL for (SuspendContextImpl context : getSuspendManager().getEventContexts()) { if (context.getSuspendPolicy() == EventRequest.SUSPEND_ALL) { if (isResumeOnlyCurrentThread() && locatableEvent != null && !context.isEvaluating()) { // if step event is present - switch context getSuspendManager().resume(context); //((SuspendManagerImpl)getSuspendManager()).popContext(context); continue; } if (!DebuggerSession.enableBreakpointsDuringEvaluation()) { notifySkippedBreakpoints(locatableEvent, true); DebuggerUtilsAsync.resume(eventSet); return; } } } } if (!isCurrentVirtualMachine(myVmProxy)) { notifySkippedBreakpoints(locatableEvent, false); DebuggerUtilsAsync.resume(eventSet); return; } SuspendContextImpl suspendContext = null; if (isResumeOnlyCurrentThread() && locatableEvent != null) { for (SuspendContextImpl context : getSuspendManager().getEventContexts()) { ThreadReferenceProxyImpl threadProxy = getVirtualMachineProxy().getThreadReferenceProxy(locatableEvent.thread()); if (context.getSuspendPolicy() == EventRequest.SUSPEND_ALL && context.isExplicitlyResumed(threadProxy)) { context.myResumedThreads.remove(threadProxy); suspendContext = context; suspendContext.myVotesToVote = eventSet.size(); break; } } } if (suspendContext == null) { suspendContext = getSuspendManager().pushSuspendContext(eventSet); } Set<ClassPrepareRequestor> notifiedClassPrepareEventRequestors = null; ReferenceType lastPreparedClass 
= null; for (Event event : eventSet) { if (getEventRequestHandler(event) != null) { // handled before getSuspendManager().voteResume(suspendContext); continue; } //if (LOG.isDebugEnabled()) { // LOG.debug("EVENT : " + event); //} try { if (event instanceof VMStartEvent) { //Sun WTK fails when J2ME when event set is resumed on VMStartEvent processVMStartEvent(suspendContext, (VMStartEvent)event); } else if (event instanceof VMDeathEvent || event instanceof VMDisconnectEvent) { processVMDeathEvent(suspendContext, event); } else if (event instanceof ClassPrepareEvent) { if (notifiedClassPrepareEventRequestors == null) { notifiedClassPrepareEventRequestors = new HashSet<>(eventSet.size()); } ReferenceType type = ((ClassPrepareEvent)event).referenceType(); if (lastPreparedClass != null && !lastPreparedClass.equals(type)) { LOG.error("EventSet contains ClassPrepareEvents for: " + lastPreparedClass + " and " + type); } lastPreparedClass = type; processClassPrepareEvent(suspendContext, (ClassPrepareEvent)event, notifiedClassPrepareEventRequestors); } else if (event instanceof LocatableEvent) { preloadThreadInfo(((LocatableEvent)event).thread()); //AccessWatchpointEvent, BreakpointEvent, ExceptionEvent, MethodEntryEvent, MethodExitEvent, //ModificationWatchpointEvent, StepEvent, WatchpointEvent if (event instanceof StepEvent) { processStepEvent(suspendContext, (StepEvent)event); } else { processLocatableEvent(suspendContext, (LocatableEvent)event); } } else if (event instanceof ClassUnloadEvent) { processDefaultEvent(suspendContext); } } catch (VMDisconnectedException e) { LOG.debug(e); } catch (InternalException e) { LOG.info(e); } catch (Throwable e) { LOG.error(e); } } } }); } catch (InternalException e) { LOG.debug(e); } catch (InterruptedException | ProcessCanceledException | VMDisconnectedException e) { throw e; } catch (Throwable e) { LOG.debug(e); } } } catch (InterruptedException | VMDisconnectedException e) { invokeVMDeathEvent(); } finally { Thread.interrupted(); 
// reset interrupted status } } private void invokeVMDeathEvent() { getManagerThread().invokeAndWait(new DebuggerCommandImpl() { @Override protected void action() { SuspendContextImpl suspendContext = getSuspendManager().pushSuspendContext(EventRequest.SUSPEND_NONE, 1); processVMDeathEvent(suspendContext, null); } }); } } private static void preprocessEvent(SuspendContextImpl suspendContext, ThreadReference thread) { ThreadReferenceProxyImpl oldThread = suspendContext.getThread(); suspendContext.setThread(thread); if(oldThread == null) { //this is the first event in the eventSet that we process suspendContext.getDebugProcess().beforeSuspend(suspendContext); } } private static Consumer<? super Event> getEventRequestHandler(Event event) { EventRequest request = event.request(); Object property = request != null ? request.getProperty(REQUEST_HANDLER) : null; if (property instanceof Consumer) { //noinspection unchecked return ((Consumer<? super Event>)property); } return null; } public static void enableRequestWithHandler(EventRequest request, Consumer<? super Event> handler) { request.putProperty(REQUEST_HANDLER, handler); DebuggerUtilsAsync.setEnabled(request, true); } private static void enableNonSuspendingRequest(EventRequest request, Consumer<? 
super Event> handler) { request.setSuspendPolicy(EventRequest.SUSPEND_NONE); enableRequestWithHandler(request, handler); } private void processVMStartEvent(final SuspendContextImpl suspendContext, VMStartEvent event) { preprocessEvent(suspendContext, event.thread()); LOG.debug("enter: processVMStartEvent()"); showStatusText(this, event); getSuspendManager().voteResume(suspendContext); } private void vmAttached() { DebuggerManagerThreadImpl.assertIsManagerThread(); LOG.assertTrue(!isAttached()); if (myState.compareAndSet(State.INITIAL, State.ATTACHED)) { final VirtualMachineProxyImpl machineProxy = getVirtualMachineProxy(); boolean canBeModified = machineProxy.canBeModified(); if (canBeModified) { final EventRequestManager requestManager = machineProxy.eventRequestManager(); if (machineProxy.canGetMethodReturnValues()) { myReturnValueWatcher = new MethodReturnValueWatcher(requestManager, this); } enableNonSuspendingRequest(requestManager.createThreadStartRequest(), event -> { ThreadReference thread = ((ThreadStartEvent)event).thread(); machineProxy.threadStarted(thread); myDebugProcessDispatcher.getMulticaster().threadStarted(this, thread); }); enableNonSuspendingRequest(requestManager.createThreadDeathRequest(), event -> { ThreadReference thread = ((ThreadDeathEvent)event).thread(); machineProxy.threadStopped(thread); myDebugProcessDispatcher.getMulticaster().threadStopped(this, thread); }); } // fill position managers PositionManagerFactory.EP_NAME.extensions(getProject()) .map(factory -> factory.createPositionManager(this)) .filter(Objects::nonNull) .forEach(this::appendPositionManager); PositionManagerFactory.EP_NAME.getPoint(getProject()).addExtensionPointListener(new ExtensionPointListener<>() { @Override public void extensionAdded(@NotNull PositionManagerFactory extension, @NotNull PluginDescriptor pluginDescriptor) { PositionManager manager = extension.createPositionManager(DebugProcessEvents.this); if (manager != null) { appendPositionManager(manager); } } 
}, false, myDisposable); myDebugProcessDispatcher.getMulticaster().processAttached(this); if (canBeModified) { createStackCapturingBreakpoints(); AsyncStacksUtils.setupAgent(this); } if (canBeModified) { MemoryAgentUtil.setupAgent(this); } XDebugSessionImpl session = (XDebugSessionImpl)getSession().getXDebugSession(); // breakpoints should be initialized after all processAttached listeners work ApplicationManager.getApplication().runReadAction(() -> { if (session != null) { session.initBreakpoints(); } }); if (Registry.is("debugger.track.instrumentation", true) && canBeModified) { trackClassRedefinitions(); } showStatusText(JavaDebuggerBundle.message("status.connected", DebuggerUtilsImpl.getConnectionDisplayName(getConnection()))); LOG.debug("leave: processVMStartEvent()"); if (session != null) { session.setReadOnly(!canBeModified); session.setPauseActionSupported(canBeModified); } if (!canBeModified) { myDebugProcessDispatcher.getMulticaster().paused(getSuspendManager().pushSuspendContext(EventRequest.SUSPEND_ALL, 0)); UIUtil.invokeLaterIfNeeded(() -> XDebugSessionTab.showFramesView(session)); } } } private void trackClassRedefinitions() { getManagerThread().invoke(PrioritizedTask.Priority.HIGH, () -> InstrumentationTracker.track(this)); } private void createStackCapturingBreakpoints() { getManagerThread().invoke(PrioritizedTask.Priority.HIGH, () -> { StackCapturingLineBreakpoint.deleteAll(this); StackCapturingLineBreakpoint.createAll(this); }); } private void processVMDeathEvent(SuspendContextImpl suspendContext, @Nullable Event event) { // do not destroy another process on reattach VirtualMachine vm = getVirtualMachineProxy().getVirtualMachine(); if (isAttached() && (event == null || vm == event.virtualMachine())) { try { preprocessEvent(suspendContext, null); cancelRunToCursorBreakpoint(); } finally { DebuggerEventThread eventThread = myEventThreads.get(vm); if (eventThread != null) { eventThread.stopListening(); myEventThreads.remove(vm); } 
closeProcess(false); } } if(event != null) { showStatusText(this, event); } } private void processClassPrepareEvent(SuspendContextImpl suspendContext, ClassPrepareEvent event, Set<ClassPrepareRequestor> notifiedRequestors) { preprocessEvent(suspendContext, event.thread()); if (LOG.isDebugEnabled()) { LOG.debug("Class prepared: " + event.referenceType().name()); } try { suspendContext.getDebugProcess().getRequestsManager().processClassPrepared(event, notifiedRequestors); } finally { getSuspendManager().voteResume(suspendContext); } } private void processStepEvent(SuspendContextImpl suspendContext, StepEvent event) { final ThreadReference thread = event.thread(); //LOG.assertTrue(thread.isSuspended()); preprocessEvent(suspendContext, thread); RequestHint hint = getRequestHint(event); deleteStepRequests(event.thread()); boolean shouldResume = false; final Project project = getProject(); if (hint != null) { final int nextStepDepth = hint.getNextStepDepth(suspendContext); if (nextStepDepth == RequestHint.RESUME) { getSession().clearSteppingThrough(); shouldResume = true; } else if (nextStepDepth != RequestHint.STOP) { final ThreadReferenceProxyImpl threadProxy = suspendContext.getThread(); doStep(suspendContext, threadProxy, hint.getSize(), nextStepDepth, hint); shouldResume = true; } if(!shouldResume && hint.isRestoreBreakpoints()) { DebuggerManagerEx.getInstanceEx(project).getBreakpointManager().enableBreakpoints(this); } } if(shouldResume) { getSuspendManager().voteResume(suspendContext); } else { showStatusText(""); stopWatchingMethodReturn(); getSuspendManager().voteSuspend(suspendContext); if (hint != null) { final MethodFilter methodFilter = hint.getMethodFilter(); if (methodFilter instanceof NamedMethodFilter && !hint.wasStepTargetMethodMatched()) { final String message = JavaDebuggerBundle.message("notification.method.has.not.been.called", ((NamedMethodFilter)methodFilter).getMethodName()); XDebuggerManagerImpl.getNotificationGroup().createNotification(message, 
MessageType.INFO).notify(project); } if (hint.wasStepTargetMethodMatched()) { suspendContext.getDebugProcess().resetIgnoreSteppingFilters(event.location(), hint); } } } } // Preload thread info in "parallel" commands, to avoid sync jdwp requests after private static void preloadThreadInfo(@Nullable ThreadReference thread) { if (Registry.is("debugger.preload.thread.info") && thread != null) { if (DebuggerUtilsAsync.isAsyncEnabled() && thread instanceof ThreadReferenceImpl) { ThreadReferenceImpl t = (ThreadReferenceImpl)thread; try { CompletableFuture.allOf(t.frameCountAsync(), t.nameAsync(), t.statusAsync(), t.frameAsync(0)).get(); } catch (Exception e) { LOG.error(e); } } } } private static RequestHint getRequestHint(Event event) { return (RequestHint)event.request().getProperty("hint"); } private void processLocatableEvent(final SuspendContextImpl suspendContext, final LocatableEvent event) { ThreadReference thread = event.thread(); //LOG.assertTrue(thread.isSuspended()); preprocessEvent(suspendContext, thread); //we use schedule to allow processing other events during processing this one //this is especially necessary if a method is breakpoint condition getManagerThread().schedule(new SuspendContextCommandImpl(suspendContext) { @Override public void contextAction(@NotNull SuspendContextImpl suspendContext) { final SuspendManager suspendManager = getSuspendManager(); SuspendContextImpl evaluatingContext = SuspendManagerUtil.getEvaluatingContext(suspendManager, suspendContext.getThread()); final LocatableEventRequestor requestor = (LocatableEventRequestor)RequestManagerImpl.findRequestor(event.request()); if (evaluatingContext != null && !(requestor instanceof InstrumentationTracker.InstrumentationMethodBreakpoint) && !DebuggerSession.enableBreakpointsDuringEvaluation()) { notifySkippedBreakpoints(event, true); // is inside evaluation, so ignore any breakpoints suspendManager.voteResume(suspendContext); return; } boolean resumePreferred = requestor != null && 
DebuggerSettings.SUSPEND_NONE.equals(requestor.getSuspendPolicy()); boolean requestHit = false; long start = requestor instanceof OverheadProducer && ((OverheadProducer)requestor).track() ? System.currentTimeMillis() : 0; try { requestHit = (requestor != null) && requestor.processLocatableEvent(this, event); } catch (final LocatableEventRequestor.EventProcessingException ex) { if (LOG.isDebugEnabled()) { LOG.debug(ex.getMessage()); } final boolean[] considerRequestHit = new boolean[]{true}; DebuggerInvocationUtil.invokeAndWait(getProject(), () -> { final String displayName = requestor instanceof Breakpoint? ((Breakpoint)requestor).getDisplayName() : requestor.getClass().getSimpleName(); final String message = JavaDebuggerBundle.message("error.evaluating.breakpoint.condition.or.action", displayName, ex.getMessage()); considerRequestHit[0] = Messages.showYesNoDialog(getProject(), message, ex.getTitle(), Messages.getQuestionIcon()) == Messages.YES; }, ModalityState.NON_MODAL); requestHit = considerRequestHit[0]; resumePreferred = !requestHit; } finally { if (start > 0) { OverheadTimings.add(DebugProcessEvents.this, (OverheadProducer)requestor, requestHit || requestor instanceof StackCapturingLineBreakpoint ? 
1 : 0, System.currentTimeMillis() - start); } } if (requestHit && requestor instanceof Breakpoint) { // if requestor is a breakpoint and this breakpoint was hit, no matter its suspend policy ApplicationManager.getApplication().runReadAction(() -> { XDebugSession session = getSession().getXDebugSession(); if (session != null) { XBreakpoint breakpoint = ((Breakpoint)requestor).getXBreakpoint(); if (breakpoint != null) { ((XDebugSessionImpl)session).processDependencies(breakpoint); } } }); } // special check for smart step into with this breakpoint inside the expressions EventSet eventSet = suspendContext.getEventSet(); if (eventSet != null && eventSet.size() > 1) { List<StepEvent> stepEvents = StreamEx.of(eventSet).select(StepEvent.class).toList(); if (!stepEvents.isEmpty()) { resumePreferred = resumePreferred || stepEvents.stream() .map(DebugProcessEvents::getRequestHint) .allMatch(h -> { if (h != null) { Integer depth = h.checkCurrentPosition(suspendContext, event.location()); return depth != null && depth != RequestHint.STOP; } return false; }); } } if(!requestHit || resumePreferred) { suspendManager.voteResume(suspendContext); } else { stopWatchingMethodReturn(); //if (suspendContext.getSuspendPolicy() == EventRequest.SUSPEND_ALL) { // // there could be explicit resume as a result of call to voteSuspend() // // e.g. when breakpoint was considered invalid, in that case the filter will be applied _after_ // // resuming and all breakpoints in other threads will be ignored. 
// // As resume() implicitly cleares the filter, the filter must be always applied _before_ any resume() action happens // myBreakpointManager.applyThreadFilter(DebugProcessEvents.this, event.thread()); //} suspendManager.voteSuspend(suspendContext); showStatusText(DebugProcessEvents.this, event); } } }); } private final AtomicBoolean myNotificationsCoolDown = new AtomicBoolean(); private void notifySkippedBreakpoints(@Nullable LocatableEvent event, boolean isEvaluation) { if (event != null && myNotificationsCoolDown.compareAndSet(false, true)) { AppExecutorUtil.getAppScheduledExecutorService().schedule(() -> myNotificationsCoolDown.set(false), 1, TimeUnit.SECONDS); String message = isEvaluation ? JavaDebuggerBundle.message("message.breakpoint.skipped", event.location()) : JavaDebuggerBundle.message("message.breakpoint.skipped.other.thread", event.location()); XDebuggerManagerImpl.getNotificationGroup() .createNotification(message, MessageType.WARNING) .notify(getProject()); } } @Nullable private static LocatableEvent getLocatableEvent(EventSet eventSet) { return StreamEx.of(eventSet).select(LocatableEvent.class).findFirst().orElse(null); } private void processDefaultEvent(SuspendContextImpl suspendContext) { preprocessEvent(suspendContext, null); getSuspendManager().voteResume(suspendContext); } }
apache-2.0
domaframework/doma
integration-test-java/src/main/java/org/seasar/doma/it/embeddable/StaffInfo.java
430
package org.seasar.doma.it.embeddable;

import java.sql.Date;

import org.seasar.doma.Embeddable;
import org.seasar.doma.it.domain.Salary;

/**
 * Immutable value object carrying the employment details of a staff member,
 * embedded into an owning entity via Doma's {@link Embeddable} mapping.
 */
@Embeddable
public class StaffInfo {

    /** Identifier of this staff member's manager. */
    public final int managerId;

    /** Date the staff member was hired. */
    public final Date hiredate;

    /** Current salary, wrapped in the {@code Salary} domain type. */
    public final Salary salary;

    /**
     * Creates a fully populated staff info value.
     *
     * @param managerId identifier of the manager
     * @param hiredate  hire date
     * @param salary    current salary
     */
    public StaffInfo(int managerId, Date hiredate, Salary salary) {
        this.salary = salary;
        this.hiredate = hiredate;
        this.managerId = managerId;
    }
}
apache-2.0
metaborg/nabl
nabl2.terms/src/main/java/mb/nabl2/terms/build/AAppl1Term.java
1381
package mb.nabl2.terms.build;

import java.util.List;

import org.immutables.serial.Serial;
import org.immutables.value.Value;

import com.google.common.collect.ImmutableList;

import mb.nabl2.terms.IApplTerm;
import mb.nabl2.terms.ITerm;
import mb.nabl2.terms.Terms;

/**
 * Application term with exactly one argument. The concrete immutable
 * implementation is generated by the Immutables annotation processor.
 */
@Value.Immutable
@Serial.Version(value = 42L)
abstract class AAppl1Term extends AbstractApplTerm implements IApplTerm {

    /** Validation hook: rejects the tuple operator, since 1-tuples are not allowed. */
    @Override @Value.Check protected AAppl1Term check() {
        final String op = getOp();
        if(Terms.TUPLE_OP.equals(op)) {
            throw new IllegalArgumentException("1-tuples are not supported.");
        }
        return this;
    }

    @Value.Parameter @Override public abstract String getOp();

    /** The single argument of this application term. */
    @Value.Parameter public abstract ITerm getArg0();

    @Override public List<ITerm> getArgs() {
        return ImmutableList.of(getArg0());
    }

    @Override public int getArity() {
        return 1;
    }

    @Override public <T> T match(Cases<T> cases) {
        return cases.caseAppl(this);
    }

    @Override public <T, E extends Throwable> T matchOrThrow(CheckedCases<T, E> cases) throws E {
        return cases.caseAppl(this);
    }

    // Identity and printing are defined uniformly for all application terms in
    // AbstractApplTerm; delegate explicitly to keep that contract visible.
    @Override public int hashCode() {
        return super.hashCode();
    }

    @Override public boolean equals(Object other) {
        return super.equals(other);
    }

    @Override public String toString() {
        return super.toString();
    }
}
apache-2.0
twitter/twitter4j
twitter4j-examples/src/main/java/twitter4j/examples/list/GetAllUserLists.java
1947
/*
 * Copyright 2007 Yusuke Yamamoto
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package twitter4j.examples.list;

import twitter4j.ResponseList;
import twitter4j.Twitter;
import twitter4j.TwitterException;
import twitter4j.TwitterFactory;
import twitter4j.UserList;

/**
 * List the all lists the authenticating or specified user subscribes to, including their own.
 *
 * @author Yusuke Yamamoto - yusuke at mac.com
 */
public final class GetAllUserLists {
    /**
     * Usage: java twitter4j.examples.list.GetAllUserLists [screen name]
     *
     * @param args message
     */
    public static void main(String[] args) {
        if (args.length < 1) {
            System.out.println("Usage: java twitter4j.examples.list.GetAllUserLists [screen name]");
            System.exit(-1);
        }
        try {
            Twitter twitter = new TwitterFactory().getInstance();
            ResponseList<UserList> lists = twitter.getAllUserLists(args[0]);
            for (UserList list : lists) {
                // Fixed: dropped a redundant trailing `+ ""` concatenation; output is unchanged.
                System.out.println("id:" + list.getId() + ", name:" + list.getName()
                        + ", description:" + list.getDescription() + ", slug:" + list.getSlug());
            }
            System.exit(0);
        } catch (TwitterException te) {
            te.printStackTrace();
            System.out.println("Failed to list the lists: " + te.getMessage());
            System.exit(-1);
        }
    }
}
apache-2.0
javalite/activejdbc
activejdbc/src/test/java/org/javalite/activejdbc/test_models/Cake.java
731
/*
Copyright 2009-2019 Igor Polevoy

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.javalite.activejdbc.test_models;

import org.javalite.activejdbc.Model;

/**
 * Empty marker subclass of {@link Pastry}; its body adds nothing, so it
 * presumably exists to exercise model inheritance in the test suite
 * (located under test_models) — TODO confirm against the tests using it.
 *
 * @author Stephane Restani
 */
public class Cake extends Pastry {
}
apache-2.0
shamanDevel/ProceduralTerrain
src/org/shaman/terrain/SpatialViewer.java
6216
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package org.shaman.terrain;

import com.jme3.app.SimpleApplication;
import com.jme3.asset.AssetManager;
import com.jme3.bounding.BoundingBox;
import com.jme3.input.ChaseCamera;
import com.jme3.input.KeyInput;
import com.jme3.input.controls.ActionListener;
import com.jme3.input.controls.KeyTrigger;
import com.jme3.light.AmbientLight;
import com.jme3.light.PointLight;
import com.jme3.material.Material;
import com.jme3.math.ColorRGBA;
import com.jme3.math.FastMath;
import com.jme3.math.Vector3f;
import com.jme3.post.SceneProcessor;
import com.jme3.renderer.RenderManager;
import com.jme3.renderer.ViewPort;
import com.jme3.renderer.queue.RenderQueue;
import com.jme3.scene.Geometry;
import com.jme3.scene.Mesh;
import com.jme3.scene.Spatial;
import com.jme3.scene.debug.Arrow;
import com.jme3.scene.debug.WireBox;
import com.jme3.scene.shape.Box;
import com.jme3.texture.FrameBuffer;
import java.util.concurrent.Callable;
import java.util.logging.Logger;
// NOTE(review): this static import is unused in the class body — looks like an
// IDE auto-import accident; candidate for removal.
import static de.lessvoid.nifty.layout.align.HorizontalAlign.center;

/**
 * Small jME3 viewer application: displays a single {@link Spatial} together with
 * coordinate axes at the origin, a gray wire box visualizing the spatial's
 * bounding box, and an optional wireframe overlay toggled with the RETURN key.
 *
 * @author Sebastian Weiss
 */
public class SpatialViewer extends SimpleApplication {
    private static final Logger LOG = Logger.getLogger(SpatialViewer.class.getName());
    // NOTE(review): never assigned — the chase-cam setup below is commented out.
    private ChaseCamera chaseCam;
    // Small blue box marking the scene origin.
    private Spatial center;
    // Point light repositioned to the camera location every frame (see update()).
    private PointLight light;
    // NOTE(review): currently unused.
    private boolean useLight = true;
    // The spatial currently on display, or null before the first setSpatial call.
    private Spatial spatial;
    // Wire box geometry tracking the displayed spatial's bounding box.
    private Geometry bounds;
    private WireProcessor wireProcessor;
    // Whether the wireframe overlay processor is currently attached.
    private boolean wireframes = false;

    /**
     * Builds the static scene: origin marker, axes, lights, the (initially
     * empty) bounding-box wire geometry, and the RETURN-key wireframe toggle.
     */
    @Override
    public void simpleInitApp() {
        center = createBox(0, 0, 0, ColorRGBA.Blue);
        attachCoordinateAxes(Vector3f.ZERO);
        flyCam.setDragToRotate(true);
//		flyCam.setEnabled(false);
//		chaseCam = new ChaseCamera(cam, center, inputManager);
        light = new PointLight();
        light.setColor(ColorRGBA.White);
        light.setPosition(new Vector3f(0, 6, -6));
        rootNode.addLight(light);
        // Half-intensity ambient light so unlit faces remain visible.
        AmbientLight am = new AmbientLight();
        am.setColor(ColorRGBA.White.mult(0.5f));
        rootNode.addLight(am);
        // Placeholder zero-extent wire box; resized in setSpatial().
        bounds = new Geometry("bounds", new WireBox(0, 0, 0));
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        mat.setColor("Color", ColorRGBA.Gray);
        bounds.setMaterial(mat);
        rootNode.attachChild(bounds);
        wireProcessor = new WireProcessor(assetManager);
        // Toggle the wireframe overlay with the RETURN key.
        inputManager.addMapping("Wireframes", new KeyTrigger(KeyInput.KEY_RETURN));
        inputManager.addListener(new ActionListener() {
            @Override
            public void onAction(String name, boolean isPressed, float tpf) {
                if ("Wireframes".equals(name) && isPressed) {
                    wireframes = !wireframes;
                    if (wireframes) {
                        viewPort.addProcessor(wireProcessor);
                    } else {
                        viewPort.removeProcessor(wireProcessor);
                    }
                }
            }
        }, "Wireframes");
    }

    /**
     * Replaces the displayed spatial. May be called from any thread: the actual
     * scene-graph mutation is enqueued onto the render thread via enqueue().
     * The spatial is rotated -90° around X (presumably to convert a Z-up model
     * to jME's Y-up convention — TODO confirm) and the wire box is resized to
     * its world bounds.
     *
     * @param geom the spatial to display; replaces any previously shown one
     */
    public void setSpatial(final Spatial geom) {
        LOG.info("show spatial " + geom);
        LOG.info("vertex count: " + geom.getVertexCount());
        LOG.info("triangle count: " + geom.getTriangleCount());
        LOG.info("bounding volume: " + geom.getWorldBound());
        enqueue(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                if (spatial != null) {
                    rootNode.detachChild(spatial);
                }
                geom.rotate(-FastMath.HALF_PI, 0, 0);
                // Cast assumes the spatial's world bound is a BoundingBox
                // (not a BoundingSphere) — would throw CCE otherwise.
                BoundingBox b = (BoundingBox) geom.getWorldBound();
                bounds.setMesh(new WireBox(b.getXExtent(), b.getYExtent(), b.getZExtent()));
                bounds.setLocalTranslation(b.getCenter());
                spatial = geom;
                rootNode.attachChild(spatial);
                LOG.info("attached");
                return null;
            }
        });
    }

    /**
     * Per-frame hook: keeps the point light at the camera position so the
     * scene is always lit from the viewer's direction (a "headlight").
     */
    @Override
    public void update() {
        super.update();
        if (light != null) {
            light.setPosition(cam.getLocation());
        }
    }

    /**
     * Creates a small unshaded colored cube at (x, y, z), attaches it to the
     * root node, and returns its geometry.
     */
    private Geometry createBox(float x, float y, float z, ColorRGBA color) {
        Box b = new Box(Vector3f.ZERO, 0.2f, 0.2f, 0.2f); // create cube shape at the origin
        Geometry g = new Geometry("Box", b);  // create cube geometry from the shape
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");  // create a simple material
        mat.setColor("Color", color);   // set color of material to blue
        g.setMaterial(mat);                   // set the cube's material
        g.setLocalTranslation(x, y, z);
        rootNode.attachChild(g);              // make the cube appear in the scene
        return g;
    }

    /**
     * Attaches red/green/blue arrows for the X/Y/Z axes at the given position.
     */
    private void attachCoordinateAxes(Vector3f pos) {
        Arrow arrow = new Arrow(Vector3f.UNIT_X.mult(3));
        arrow.setLineWidth(4); // make arrow thicker
        putShape(arrow, ColorRGBA.Red).setLocalTranslation(pos);

        arrow = new Arrow(Vector3f.UNIT_Y.mult(3));
        arrow.setLineWidth(4); // make arrow thicker
        putShape(arrow, ColorRGBA.Green).setLocalTranslation(pos);

        arrow = new Arrow(Vector3f.UNIT_Z.mult(3));
        arrow.setLineWidth(4); // make arrow thicker
        putShape(arrow, ColorRGBA.Blue).setLocalTranslation(pos);
    }

    /**
     * Wraps a mesh in an unshaded wireframe geometry of the given color and
     * attaches it to the root node.
     */
    private Geometry putShape(Mesh shape, ColorRGBA color) {
        Geometry g = new Geometry("coordinate axis", shape);
        Material mat = new Material(assetManager, "Common/MatDefs/Misc/Unshaded.j3md");
        mat.getAdditionalRenderState().setWireframe(true);
        mat.setColor("Color", color);
        g.setMaterial(mat);
        rootNode.attachChild(g);
        return g;
    }

    /**
     * Scene processor that forces a blue wireframe material on the whole render
     * queue while attached, producing the wireframe overlay mode.
     */
    public class WireProcessor implements SceneProcessor {

        RenderManager renderManager;
        Material wireMaterial;

        public WireProcessor(AssetManager assetManager) {
            wireMaterial = new Material(assetManager, "/Common/MatDefs/Misc/Unshaded.j3md");
            wireMaterial.setColor("Color", ColorRGBA.Blue);
            wireMaterial.getAdditionalRenderState().setWireframe(true);
        }

        public void initialize(RenderManager rm, ViewPort vp) {
            renderManager = rm;
        }

        // NOTE(review): reshape() throws — resizing the window while the
        // processor is attached would crash; a no-op would likely be safer.
        public void reshape(ViewPort vp, int w, int h) {
            throw new UnsupportedOperationException("Not supported yet.");
        }

        public boolean isInitialized() {
            return renderManager != null;
        }

        public void preFrame(float tpf) {
        }

        public void postQueue(RenderQueue rq) {
            // Force the wireframe material for everything rendered after this point.
            renderManager.setForcedMaterial(wireMaterial);
        }

        public void postFrame(FrameBuffer out) {
            // Restore normal materials once the frame is done.
            renderManager.setForcedMaterial(null);
        }

        public void cleanup() {
            renderManager.setForcedMaterial(null);
        }
    }
}
apache-2.0
JaLandry/MeasureAuthoringTool_LatestSprint
mat/test/mat/util/Substring.java
432
package mat.util;

/**
 * Tiny demo of the {@link String#substring} overloads: prints the original
 * string and three substrings of it.
 */
public class Substring {
    public static void main(String[] av) {
        String a = "Java is great.";
        System.out.println(a);
        String b = a.substring(5);           // b is the String "is great."
        System.out.println(b);
        // Fixed comment: substring(0,1) yields the first character only —
        // the original comment incorrectly claimed it was "is".
        String c = a.substring(0,1);         // c is the String "J"
        System.out.println(c);
        String d = a.substring(5,a.length());// d is "is great." (same as substring(5))
        System.out.println(d);
    }
}
apache-2.0
dingyifly/medical
src/main/java/com/medical/modules/sys/dao/RoleDao.java
834
/**
 * Copyright &copy; 2012-2014 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
 */
package com.medical.modules.sys.dao;

import com.medical.common.persistence.CrudDao;
import com.medical.common.persistence.annotation.MyBatisDao;
import com.medical.modules.sys.entity.Role;

/**
 * Role DAO interface. (Javadoc translated from the original Chinese.)
 *
 * @author ThinkGem
 * @version 2013-12-05
 */
@MyBatisDao
public interface RoleDao extends CrudDao<Role> {

	/** Looks up a role by its name (matching fields taken from the given role). */
	public Role getByName(Role role);

	/** Looks up a role by its English name. */
	public Role getByEnname(Role role);

	/**
	 * Maintains the role-to-menu permission relation.
	 * @param role the role whose menu links are affected
	 * @return presumably the number of affected rows (MyBatis convention) — TODO confirm in mapper XML
	 */
	public int deleteRoleMenu(Role role);

	/** Inserts role-to-menu permission links for the given role. */
	public int insertRoleMenu(Role role);

	/**
	 * Maintains the role-to-office (company department) relation.
	 * @param role the role whose office links are affected
	 * @return presumably the number of affected rows (MyBatis convention) — TODO confirm in mapper XML
	 */
	public int deleteRoleOffice(Role role);

	/** Inserts role-to-office links for the given role. */
	public int insertRoleOffice(Role role);

}
apache-2.0
hubme/WorkHelperApp
javalib/src/main/java/com/example/exception/ExceptionMain.java
756
package com.example.exception; /** * @author VanceKing * @since 2016/12/31. */ public class ExceptionMain { public static void main(String[] args) { ExceptionMain main = new ExceptionMain(); try { main.reThrowException("First"); } catch (FirstException | SecondException e) {//异常参数变量(e)是定义为final的,所以不能被修改 System.out.print(e.getMessage()); e.printStackTrace(); } } private void reThrowException(String s) throws FirstException, SecondException { if ("First".equals(s)) { throw new FirstException("First"); } else if ("Second".equals(s)) { throw new SecondException("Second"); } } }
apache-2.0
USC-NSL/SIF
src/enl/sif/examples/MethodTracing.java
909
package enl.sif.examples; import enl.sif.codepoint.BytecodePosition; import enl.sif.codepoint.CP; import enl.sif.codepoint.CPFinder; import enl.sif.codepoint.CPInstrumenter; import enl.sif.codepoint.InstrumentOperation; import enl.sif.codepoint.InstrumentPosition; import enl.sif.codepoint.SIFARunnable; import enl.sif.codepoint.ThisType; import enl.sif.codepoint.UserDefinedInvoke; public class MethodTracing implements SIFARunnable { public void run() { CPFinder.init(); CPFinder.setClass(null, "android.app.Activity"); CPFinder.setMethod("onCreate:\\(Landroid\\/os\\/Bundle;\\)V"); CPFinder.setBytecode(BytecodePosition.ENTRY); for (CP cp : CPFinder.apply()) { UserDefinedInvoke code = new UserDefinedInvoke("enl.sif.examples.MyLogger", "start", new ThisType()); CPInstrumenter.dryRun(cp, InstrumentOperation.INSERT, InstrumentPosition.BEFORE, code); } CPInstrumenter.exec(); } }
apache-2.0
super-meetup/SuperMeetup
app/src/main/java/com/supermeetup/supermeetup/model/Membership.java
510
package com.supermeetup.supermeetup.model;

import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;

import org.parceler.Parcel;

import java.util.ArrayList;

/**
 * Gson/Parceler model wrapping the "member" array of a membership response.
 *
 * Created by Irene on 10/21/17.
 */
@Parcel
public class Membership {

    // Mapped from the JSON key "member". Package-private (no modifier) so the
    // Parceler processor can access it directly.
    @SerializedName("member")
    @Expose
    ArrayList<Member> member;

    /**
     * @return the backing member list; note this exposes the internal list
     *         directly rather than a defensive copy
     */
    public ArrayList<Member> getMember() {
        return member;
    }

    public void setMember(ArrayList<Member> member) {
        this.member = member;
    }
}
apache-2.0
axellebot/Tub-Android
presentation/src/main/java/fr/bourgmapper/tub/presentation/presenter/InfoFragmentPresenter.java
915
package fr.bourgmapper.tub.presentation.presenter; import javax.inject.Inject; import fr.bourgmapper.tub.presentation.internal.di.PerFragment; import fr.bourgmapper.tub.presentation.view.fragment.InfoFragment; /** * {@link Presenter} that controls communication between views and models of the presentation * layer. */ @PerFragment public class InfoFragmentPresenter implements Presenter { private static String TAG = "MapOverviewFragmentPrstr"; private InfoFragment infoFragment; @Inject InfoFragmentPresenter() { } public void setView(InfoFragment infoFragment) { this.infoFragment = infoFragment; } @Override public void resume() { } @Override public void pause() { } @Override public void destroy() { this.infoFragment = null; } /** * Initializes the presenter. */ public void initialize() { } }
apache-2.0
patrickfav/tuwien
bakk/OOP WS08/src/aufgabe8/Container.java
5269
package aufgabe8;

import java.util.ArrayList;
import java.util.Random;

/**
 * Shared buffer between suppliers and consumers: suppliers deliver products
 * into a bounded store, and two internal threads pull from suppliers / push to
 * consumers chosen at random.
 *
 * NOTE(review): the worker threads poll in tight busy-wait loops and the
 * {@code working}/{@code actualvolume} fields are accessed from multiple
 * threads without synchronization or volatile — looks racy; confirm whether
 * this is acceptable for the exercise.
 */
public class Container implements SupplierNotifier, ConsumerNotifier, InventoryListener, LoggingInterface {

    // Total number of products ever collected (never decremented).
    private int count;
    // Products currently held, consumed FIFO from index 0.
    private ArrayList<Product> content;
    private ArrayList<SupplierListener> suppliers;
    private ArrayList<ConsumerListener> consumers;
    // Maximum number of products the container may hold.
    private final int capacity;
    // Current fill level; kept in step with content.size().
    private int actualvolume;
    private Thread collectorThread;
    private Thread supplierThread;
    // Run flag for both worker threads. NOTE(review): not volatile — TODO confirm visibility.
    private boolean working = false;
    // When true, debug-level write() calls are printed.
    private boolean debugmode = false;

    /**
     * Creates an empty container.
     *
     * @param debugmode whether debug log output is printed
     * @param capacity  maximum number of products held at once
     */
    public Container(boolean debugmode, int capacity) {
        count = 0;
        this.capacity = capacity;
        this.suppliers = new ArrayList<SupplierListener>();
        this.consumers = new ArrayList<ConsumerListener>();
        this.content = new ArrayList<Product>();
        actualvolume = 0;
        this.debugmode=debugmode;
    }

    /**
     * adds the delivered supplierListener instance to the registered ones
     */
    public synchronized void registerSupplierListener(SupplierListener listener) {
        write(true, "the Supplier "+ listener.toString() +" has registered itself");
        suppliers.add(listener);
    }

    /**
     * removes the delivered supplierListener instance from the registered ones
     * (no-op if it was not registered)
     */
    public synchronized void derigisterSupplierListener(SupplierListener listener) {
        int i = suppliers.indexOf(listener);
        if (i >=0) {
            write(true, "the Supplier "+ listener.toString() +" has derigistered itself");
            suppliers.remove(i);
        }
    }

    /**
     * adds a product to the container if there is free capacity
     *
     * @return true if the product was stored, false if the container is full
     */
    public boolean collectProduct(Product product) {
        if (actualvolume < capacity /*&& !suppliers.isEmpty()*/) {
            write(true, "is product null:"+(product == null));
            content.add(product);
            count++;
            actualvolume++;
            write(true, "added stick " + product.toString() + " to container. Acutal volume " + actualvolume + ". Count is " + count);
            return true;
        }
        else
            return false;
    }

    /**
     * adds the delivered ConsumerListener instance to the registered ones
     */
    public synchronized void registerConsumerListener(ConsumerListener listener) {
        write(true, "adding Listener " +listener.toString() +"to container.");
        consumers.add(listener);
    }

    /**
     * removes the delivered ConsumerListener instance from the registered ones
     * (no-op if it was not registered)
     */
    public synchronized void derigisterConsumerListener(ConsumerListener listener) {
        int i = consumers.indexOf(listener);
        if (i >=0) {
            write(true, "removing Listener " +listener.toString());
            consumers.remove(i);
        }
    }

    /**
     * starts the collector thread and the supplier thread for an instance of container
     * while the variable working = true these threads ask for products until the full
     * capacity isn't reached and they try to provide the products to registered consumers
     * till the actualvolume is 0
     */
    public void startworking() {
        write(true, "startworking has been called.");
        working = true;
        // Collector: while below capacity, asks a randomly chosen supplier to deliver.
        collectorThread = new Thread(new Runnable(){
            public void run() {
                write(true, "collectorThread started.");
                while(working){
                    write(true, "actualvolume=" +actualvolume +" capacity="+capacity);
                    while(actualvolume < capacity){
                        if (suppliers.size() > 0){
                            Random r = new Random();
                            // NOTE(review): Math.round on an int is a no-op; nextInt already bounds the index.
                            int i = Math.round(r.nextInt(suppliers.size()));
                            write(true, "selected the item nr " +i +" to provide the product.");
                            SupplierListener randSupplier = suppliers.get(i);
                            randSupplier.deliverStick();
                            write(true, "asked " + randSupplier.toString() +" for it's product.");
                        }
                        if (!working) break;
                    }
                }
                write(true, "collectorThread ended.");
            }
        });
        // Supplier: while products are available, hands the oldest one to a random consumer.
        supplierThread = new Thread(new Runnable(){
            public void run() {
                write(true, "supplierThread started.");
                while(working){
                    while(actualvolume >0){
                        if (consumers.size() > 0) {
                            Random r = new Random();
                            int i = Math.round(r.nextInt(consumers.size()));
                            ConsumerListener randConsumer = consumers.get(i);
                            randConsumer.collectProduct(content.get(0));
                            write(true, "delivered " + randConsumer.toString() + " the product "+ content.get(0).toString() + ".");
                            content.remove(0);
                            actualvolume--;
                        }
                        if (!working) break;
                    }
                }
                write(true, "supplierThread ended.");
            }
        });
        collectorThread.setName(this.toString()+".collectorThread");
        supplierThread.setName(this.toString()+".supplierThread");
        collectorThread.start();
        supplierThread.start();
    }

    /**
     * signals the collector and the supplier threads to stop by setting their run variable to false
     */
    public void stopworking() {
        write(true, "stopworking has been called.");
        working = false;
    }

    /**
     * emergency stop. NOTE(review): despite the log message, this only calls
     * stopworking() and never interrupts the threads — a sleeping/blocked
     * thread would not be woken; confirm whether Thread.interrupt() was intended.
     */
    public void eStop() {
        stopworking();
        write(true, "both Threads have been interrupted.");
    }

    /**
     * prints a prefixed log message; debug messages (debug == true) are only
     * printed when the instance is in debugmode
     */
    public void write(boolean debug, String message) {
        String prefix = "Container -"+this.toString()+"-> ";
        if(debug){
            if (debugmode)
                System.out.println(prefix + message);
        }
        else
            System.out.println(prefix + message);
    }

    // Intentionally empty in this version (interface requirement, presumably) — TODO confirm.
    public void writeCompleteStatus() {
    }
}
apache-2.0
veggiespam/zap-extensions
addOns/ascanrulesBeta/src/main/java/org/zaproxy/zap/extension/ascanrulesBeta/SQLInjectionSQLite.java
47738
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2014 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.ascanrulesBeta; import java.net.UnknownHostException; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.httpclient.InvalidRedirectLocationException; import org.apache.commons.httpclient.URIException; import org.apache.commons.lang.RandomStringUtils; import org.apache.log4j.Logger; import org.parosproxy.paros.Constant; import org.parosproxy.paros.core.scanner.AbstractAppParamPlugin; import org.parosproxy.paros.core.scanner.Alert; import org.parosproxy.paros.core.scanner.Category; import org.parosproxy.paros.network.HttpMessage; import org.zaproxy.zap.model.Tech; import org.zaproxy.zap.model.TechSet; /** * The SQLInjectionSQLite plugin identifies SQLite specific SQL Injection vulnerabilities using * SQLite specific syntax. If it doesn't use SQLite specific syntax, it belongs in the generic * SQLInjection class! 
* * @author 70pointer */ public class SQLInjectionSQLite extends AbstractAppParamPlugin { // Some relevant notes about SQLite's support for various functions, which will affect what SQL // is valid, // and can be used to exploit each particular version :) // some of the functions noted here are *very* useful in exploiting the system hosting the // SQLite instance, // but I won't give the game away too easily. Go have a play! // 3.8.6 (2014-08-15) - hex integer literals supported, likely(X) supported, readfile(X) // and writefile(X,Y) supported (if extension loading enabled) // 3.8.3 (2014-02-03) - common table subexpressions supported ("WITH" keyword), printf // function supported // 3.8.1 (2013-10-17) - unlikely() and likelihood() functions supported // 3.8.0 (2013-08-26) - percentile() function added as loadable extension // 3.7.17 (2013-05-20) - new loadable extensions: including amatch, closure, fuzzer, // ieee754, nextchar, regexp, spellfix, and wholenumber // 3.7.16 (2013-03-18) - unicode(A) and char(X1,...,XN) supported // 3.7.15 (2012-12-12) - instr() supported // 3.6.8 (2009-01-12) - nested transactions supported // 3.5.7 (2008-03-17) - ALTER TABLE uses double-quotes instead of single-quotes for // quoting filenames (why filenames????). // 3.3.13 (2007-02-13) - randomBlob() and hex() supported // 3.3.8 (2006-10-09) - IF EXISTS on CREATE/DROP TRIGGER/VIEW // 3.3.7 (2006-08-12) - virtual tables, dynamically loaded extensions, MATCH operator // supported // 3.2.6 (2005-09-17) - COUNT(DISTINCT expr) supported // 3.2.3 (2005-08-21) - CAST operator, grave-accent quoting supported // 3.2.0 (2005-03-21) - ALTER TABLE ADD COLUMN supported // 3.1.0 ALPHA (2005-01-21) - ALTER TABLE ... 
RENAME TABLE, CURRENT_TIME, CURRENT_DATE, and // CURRENT_TIMESTAMP, EXISTS clause, correlated subqueries supported // 3.0.0 alpha (2004-06-18) - dropped support for COPY function, possibly added // sqlite_source_id(), replace() (which were not in 2.8.17) // 2.8.6 (2003-08-21) - date functions added // 2.8.5 (2003-07-22) - supports LIMIT on a compound SELECT statement // 2.8.1 (2003-05-16) - ATTACH and DETACH commands supported // 2.5.0 (2002-06-17) - Double-quoted strings interpreted as column names not text // literals, // SQL-92 compliant handling of NULLs, full SQL-92 join syntax and // LEFT OUTER JOINs supported // 2.4.7 (2002-04-06) - "select TABLE.*", last_insert_rowid() supported // 2.4.4 (2002-03-24) - CASE expressions supported // 2.4.0 (2002-03-10) - coalesce(), lower(), upper(), and random() supported // 2.3.3 (2002-02-18) - Allow identifiers to be quoted in square brackets, "CREATE TABLE // AS SELECT" supported // 2.2.0 (2001-12-22) - "SELECT rowid, * FROM table1" supported // 2.0.3 (2001-10-13) - &, |,~,<<,>>,round() and abs() supported // 2.0.1 (2001-10-02) - "expr NOT NULL", "expr NOTNULL" supported // 1.0.32 (2001-07-23) - quoted strings supported as table and column names in expressions // 1.0.28 (2001-04-04) - special column names ROWID, OID, and _ROWID_ supported (with // random values) // 1.0.4 (2000-08-28) - length() and substr() supported // 1.0 (2000-08-17) - covers lots of unversioned releases. 
||, fcnt(), UNION // ALL, INTERSECT, and EXCEPT, LIKE, GLOB, COPY supported

    // --- scan budgets: all four are assigned in init() from the configured attack strength ---
    private boolean doTimeBased = false; // run the time-based (blind) injection checks?
    private int doTimeMaxRequests = 0; // request budget for the time-based checks
    private boolean doUnionBased = false; // run the UNION-based version-fingerprint checks?
    private int doUnionMaxRequests = 0; // request budget for the UNION-based checks

    /** SQLite one-line comment */
    public static final String SQL_ONE_LINE_COMMENT = "--";

    /**
     * SQLite specific time based injection strings, where each tries to cause a measurable delay
     */
    // Note: <<<<ORIGINALVALUE>>>> is replaced with the original parameter value at runtime in these
    // examples below
    // TODO: maybe add support for ')' after the original value, before the sleeps
    // Note: randomblob is supported from SQLite 3.3.13 (2007-02-13)
    // case statement is supported from SQLite 2.4.4 (2002-03-24)
    private static String[] SQL_SQLITE_TIME_REPLACEMENTS = {
        // omitting original param
        "case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then 1 else 1 end ", // integer
        "' | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end | '", // character/string (single quote)
        "\" | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end | \"", // character/string (double quote)
        "case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then 1 else 1 end "
                + SQL_ONE_LINE_COMMENT, // integer
        "' | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end "
                + SQL_ONE_LINE_COMMENT, // character/string (single quote)
        "\" | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end "
                + SQL_ONE_LINE_COMMENT, // character/string (double quote)
        // with the original parameter
        "<<<<ORIGINALVALUE>>>> * case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then 1 else 1 end ", // integer
        "<<<<ORIGINALVALUE>>>>' | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end | '", // character/string (single quote)
        "<<<<ORIGINALVALUE>>>>\" | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end | \"", // character/string (double quote)
        "<<<<ORIGINALVALUE>>>> * case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then 1 else 1 end "
                + SQL_ONE_LINE_COMMENT, // integer
        "<<<<ORIGINALVALUE>>>>' | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end "
                + SQL_ONE_LINE_COMMENT, // character/string (single quote)
        "<<<<ORIGINALVALUE>>>>\" | case randomblob(<<<<NUMBLOBBYTES>>>>) when not null then \"\" else \"\" end "
                + SQL_ONE_LINE_COMMENT, // character/string (double quote)
    };

    /** if the following errors occur during the attack, it's a SQL injection vuln */
    private Pattern errorMessagePatterns[] = {
        Pattern.compile("no such function: randomblob", Pattern.CASE_INSENSITIVE)
        // this one is specific to the time-based attack attempted here, and is indicative of
        // SQLite versions < 3.3.13, and >= 2.4.4 (because the CASE statement is used)
        , Pattern.compile("near \\\".+\\\": syntax error", Pattern.CASE_INSENSITIVE)
    };

    /** a template that defines how a UNION statement is built up, to find the SQLite version */
    private static String UNION_ATTACK_TEMPLATE =
            "<<<<VALUE>>>><<<<SYNTACTIC_PREVIOUS_STATEMENT_TYPE_CLOSER>>>><<<<SYNTACTIC_PREVIOUS_STATEMENT_CLAUSE_CLOSER>>>> <<<<UNIONSTATEMENT>>>> select <<<<SQLITE_VERSION_FUNCTION>>>><<<<UNIONADDITIONALCOLUMNS>>>><<<<SYNTACTIC_NEXT_STATEMENT_COMMENTER>>>>";

    // candidate tokens to close off whatever literal context the parameter sits in
    private static String SYNTACTIC_PREVIOUS_STATEMENT_TYPE_CLOSERS[] = {
        "" // closing off an int parameter in the SQL statement
        , "'" // closing off a char/string parameter in the SQL statement
        , "\"" // closing off a string parameter in the SQL statement
    };

    // candidate runs of closing parentheses for nested clauses in the original statement
    private static String SYNTACTIC_PREVIOUS_STATEMENT_CLAUSE_CLOSERS[] = {
        "", ")", "))", ")))", "))))", ")))))"
    };

    private static String SYNTACTIC_UNION_STATEMENTS[] = {
        "UNION"
        // ,"UNION ALL" Not necessary
    };

    private static String SQLITE_VERSION_FUNCTIONS[] = {
        "sqlite_version()" // string type - gets the version in the form of "3.7.16.2", for instance
        , "sqlite_version()+0" // numeric type - SQLite does implicit casting to convert "3.7.16.2"
        // to 3.7 in numeric form.
        , "sqlite_source_id()" // string type - this function was added in 3.6.18. it gets the
        // version in the form of "2013-04-12 11:52:43
        // cbea02d93865ce0e06789db95fd9168ebac970c7"
        // there may not be much point in running this one, since it gives much the same info as
        // "sqlite_version()" and cannot be converted to an int.
        // maybe it's useful in the case that a WAF is detecting / blocking "sqlite_version()"
        // though?
    };

    // padding columns appended to the injected SELECT until its arity matches the original query
    private static String UNION_ADDITIONAL_COLUMNS[] = {
        "",
        ",null",
        ",null,null",
        ",null,null,null",
        ",null,null,null,null",
        ",null,null,null,null,null",
        ",null,null,null,null,null,null",
        ",null,null,null,null,null,null,null",
        ",null,null,null,null,null,null,null,null",
        ",null,null,null,null,null,null,null,null,null"
    };

    private static String SYNTACTIC_NEXT_STATEMENT_COMMENTER[] = {
        SQL_ONE_LINE_COMMENT
        // ,"" //this isn't useful at all
    };

    /** set depending on the attack strength / threshold */
    private long maxBlobBytes = 0; // upper bound on the randomblob() size (attack strength)
    private long minBlobBytes = 100000; // starting randomblob() size for the delay probes
    private long parseDelayDifference = 0; // max ms an attack delay may sit within a parse delay
    private long incrementalDelayIncreasesForAlert = 0; // sequential increases needed to alert

    // alphabet used to build same-length random control parameters for the parse-delay check
    private char[] RANDOM_PARAMETER_CHARS = "abcdefghijklmnopqrstuvwyxz0123456789".toCharArray();

    /** for logging. */
    private static Logger log = Logger.getLogger(SQLInjectionSQLite.class);

    /** determines if we should output Debug level logging */
    private boolean debugEnabled = log.isDebugEnabled();

    /** @return the unique plugin id of this scan rule */
    @Override
    public int getId() {
        return 40024;
    }

    /** @return the internationalised name of this scan rule */
    @Override
    public String getName() {
        return Constant.messages.getString("ascanbeta.sqlinjection.sqlite.name");
    }

    /** Only run this rule when the target technology set includes SQLite. */
    @Override
    public boolean targets(TechSet technologies) {
        return technologies.includes(Tech.SQLite);
    }

    @Override
    public String getDescription() {
        return Constant.messages.getString("ascanbeta.sqlinjection.desc");
    }

    @Override
    public int getCategory() {
        return Category.INJECTION;
    }

    @Override
    public String getSolution() {
        return Constant.messages.getString("ascanbeta.sqlinjection.soln");
    }

    @Override
    public String getReference() {
        return Constant.messages.getString("ascanbeta.sqlinjection.refs");
    }

    /** Configures request budgets and detection thresholds for this scan run. */
    @Override
    public void init() {
        if (this.debugEnabled) log.debug("Initialising");
        // set up what we are allowed to do, depending on the attack strength that was set.
        if (this.getAttackStrength() == AttackStrength.LOW) {
            doTimeBased = true;
            doTimeMaxRequests = 15;
            this.maxBlobBytes = 1000000000;
            doUnionBased = false;
            doUnionMaxRequests = 0;
        } else if (this.getAttackStrength() == AttackStrength.MEDIUM) {
            doTimeBased = true;
            doTimeMaxRequests = 35;
            this.maxBlobBytes = 1000000000;
            doUnionBased = false;
            doUnionMaxRequests = 0;
        } else if (this.getAttackStrength() == AttackStrength.HIGH) {
            doTimeBased = true;
            doTimeMaxRequests = 50;
            this.maxBlobBytes = 1000000000;
            doUnionBased = true;
            doUnionMaxRequests = 50;
        } else if (this.getAttackStrength() == AttackStrength.INSANE) {
            doTimeBased = true;
            doTimeMaxRequests = 500;
            this.maxBlobBytes = 1000000000;
            doUnionBased = true;
            doUnionMaxRequests = 5000;
        }
        // the allowable difference between a parse delay and an attack delay is controlled by the
        // threshold
        if (this.getAlertThreshold() == AlertThreshold.LOW) {
            parseDelayDifference = 100;
            incrementalDelayIncreasesForAlert = 1;
        } else if (this.getAlertThreshold() == AlertThreshold.MEDIUM) {
            parseDelayDifference = 200;
            incrementalDelayIncreasesForAlert = 2;
        } else if (this.getAlertThreshold() == AlertThreshold.HIGH) {
            parseDelayDifference = 400;
            incrementalDelayIncreasesForAlert = 3;
        }
    }

    /**
     * scans for SQL Injection vulnerabilities, using SQLite specific syntax. If it doesn't use
     * specifically SQLite syntax, it does not belong in here, but in TestSQLInjection
     *
     * @param originalMessage the original request (its response is re-fetched below)
     * @param paramName the name of the parameter under test
     * @param originalParamValue the parameter's original value
     */
    @Override
    public void scan(HttpMessage originalMessage, String paramName, String originalParamValue) {
        try {
            // the original message passed to us never has the response populated. fix that by
            // re-retrieving it..
            sendAndReceive(originalMessage, false); // do not follow redirects

            // Do time based SQL injection checks..
            // Timing Baseline check: we need to get the time that it took the original query, to
            // know if the time based check is working correctly..
            HttpMessage msgTimeBaseline = getNewMsg();
            long originalTimeStarted = System.currentTimeMillis();
            try {
                sendAndReceive(msgTimeBaseline);
            } catch (java.net.SocketTimeoutException e) {
                // to be expected occasionally, if the base query was one that contains some
                // parameters exploiting time based SQL injection?
                if (this.debugEnabled)
                    log.debug(
                            "The Base Time Check timed out on ["
                                    + msgTimeBaseline.getRequestHeader().getMethod()
                                    + "] URL ["
                                    + msgTimeBaseline.getRequestHeader().getURI().getURI()
                                    + "]");
            }
            long originalTimeUsed = System.currentTimeMillis() - originalTimeStarted;

            // if the time was very slow (because JSP was being compiled on first call, for
            // instance)
            // then the rest of the time based logic will fail. Lets double-check for that scenario
            // by requesting the url again.
            // If it comes back in a more reasonable time, we will use that time instead as our
            // baseline. If it come out in a slow fashion again,
            // we will abort the check on this URL, since we will only spend lots of time trying
            // request, when we will (very likely) not get positive results.
            if (originalTimeUsed > 5000) {
                long originalTimeStarted2 = System.currentTimeMillis();
                try {
                    sendAndReceive(msgTimeBaseline);
                } catch (java.net.SocketTimeoutException e) {
                    // to be expected occasionally, if the base query was one that contains some
                    // parameters exploiting time based SQL injection?
                    if (this.debugEnabled)
                        log.debug(
                                "Base Time Check 2 timed out on ["
                                        + msgTimeBaseline.getRequestHeader().getMethod()
                                        + "] URL ["
                                        + msgTimeBaseline.getRequestHeader().getURI().getURI()
                                        + "]");
                }
                long originalTimeUsed2 = System.currentTimeMillis() - originalTimeStarted2;
                if (originalTimeUsed2 > 5000) {
                    // no better the second time around. we need to bale out.
                    if (this.debugEnabled)
                        log.debug(
                                "Both base time checks 1 and 2 for ["
                                        + msgTimeBaseline.getRequestHeader().getMethod()
                                        + "] URL ["
                                        + msgTimeBaseline.getRequestHeader().getURI().getURI()
                                        + "] are way too slow to be usable for the purposes of checking for time based SQL Injection checking. We are aborting the check on this particular url.");
                    return;
                } else {
                    // phew. the second time came in within the limits. use the later timing
                    // details as the base time for the checks.
                    originalTimeUsed = originalTimeUsed2;
                    originalTimeStarted = originalTimeStarted2;
                }
            }
            // end of timing baseline check

            int countTimeBasedRequests = 0;
            if (this.debugEnabled)
                log.debug(
                        "Scanning URL ["
                                + getBaseMsg().getRequestHeader().getMethod()
                                + "] ["
                                + getBaseMsg().getRequestHeader().getURI()
                                + "], ["
                                + paramName
                                + "] with value ["
                                + originalParamValue
                                + "] for SQL Injection");

            // SQLite specific time-based SQL injection checks
            boolean foundTimeBased = false;
            for (int timeBasedSQLindex = 0;
                    timeBasedSQLindex < SQL_SQLITE_TIME_REPLACEMENTS.length
                            && doTimeBased
                            && countTimeBasedRequests < doTimeMaxRequests
                            && !foundTimeBased;
                    timeBasedSQLindex++) {
                // since we have no means to create a deterministic delay in SQLite, we need to take
                // a different approach:
                // in each iteration, increase the number of random blobs for SQLite to create. If
                // we can detect an increasing delay, we know
                // that the payload has been successfully injected.
                int numberOfSequentialIncreases = 0;
                String detectableDelayParameter = null;
                long detectableDelay = 0;
                String maxDelayParameter = null;
                long maxDelay = 0;
                HttpMessage detectableDelayMessage = null;
                long previousDelay = originalTimeUsed;
                boolean potentialTimeBasedSQLInjection = false;
                boolean timeExceeded = false;

                // grow the blob size by a factor of 10 each pass; an attack-induced delay should
                // grow with it, a coincidental delay should not
                for (long numBlobsToCreate = minBlobBytes;
                        numBlobsToCreate <= this.maxBlobBytes
                                && !timeExceeded
                                && numberOfSequentialIncreases < incrementalDelayIncreasesForAlert;
                        numBlobsToCreate *= 10) {

                    HttpMessage msgDelay = getNewMsg();
                    String newTimeBasedInjectionValue =
                            SQL_SQLITE_TIME_REPLACEMENTS[timeBasedSQLindex].replace(
                                    "<<<<ORIGINALVALUE>>>>", originalParamValue);
                    newTimeBasedInjectionValue =
                            newTimeBasedInjectionValue.replace(
                                    "<<<<NUMBLOBBYTES>>>>", Long.toString(numBlobsToCreate));
                    setParameter(msgDelay, paramName, newTimeBasedInjectionValue);

                    if (this.debugEnabled)
                        log.debug(
                                "\nTrying '"
                                        + newTimeBasedInjectionValue
                                        + "'. The number of Sequential Increases already is "
                                        + numberOfSequentialIncreases);

                    // send it.
                    long modifiedTimeStarted = System.currentTimeMillis();
                    try {
                        sendAndReceive(msgDelay);
                        countTimeBasedRequests++;
                    } catch (java.net.SocketTimeoutException e) {
                        // to be expected occasionally, if the contains some parameters exploiting
                        // time based SQL injection
                        if (this.debugEnabled)
                            log.debug(
                                    "The time check query timed out on ["
                                            + msgTimeBaseline.getRequestHeader().getMethod()
                                            + "] URL ["
                                            + msgTimeBaseline.getRequestHeader().getURI().getURI()
                                            + "] on field: ["
                                            + paramName
                                            + "]");
                    }
                    long modifiedTimeUsed = System.currentTimeMillis() - modifiedTimeStarted;

                    // before we do the time based checking, first check for a known error message
                    // from the attack, indicating a SQL injection vuln
                    for (Pattern errorMessagePattern : errorMessagePatterns) {
                        Matcher matcher =
                                errorMessagePattern.matcher(msgDelay.getResponseBody().toString());
                        boolean errorFound = matcher.find();
                        if (errorFound) {
                            // Likely an error based SQL Injection. Raise it
                            String extraInfo =
                                    Constant.messages.getString(
                                            "ascanbeta.sqlinjection.sqlite.alert.errorbased.extrainfo",
                                            errorMessagePattern);
                            // raise the alert
                            bingo(
                                    Alert.RISK_HIGH,
                                    Alert.CONFIDENCE_MEDIUM,
                                    getName(),
                                    getDescription(),
                                    getBaseMsg().getRequestHeader().getURI().getURI(), // url
                                    paramName,
                                    newTimeBasedInjectionValue,
                                    extraInfo,
                                    getSolution(),
                                    errorMessagePattern.toString(),
                                    this.getCweId(),
                                    this.getWascId(),
                                    msgDelay);
                            if (this.debugEnabled)
                                log.debug(
                                        "A likely Error Based SQL Injection Vulnerability has been found with ["
                                                + msgDelay.getRequestHeader().getMethod()
                                                + "] URL ["
                                                + msgDelay.getRequestHeader().getURI().getURI()
                                                + "] on field: ["
                                                + paramName
                                                + "], by matching for pattern ["
                                                + errorMessagePattern.toString()
                                                + "]");
                            // yeah, I know. we found an error based, while looking
                            // for a time based. bale out anyways.
                            foundTimeBased = true;
                            break; // out of the loop
                        }
                    }
                    // outta the time based loop..
                    if (foundTimeBased) break;

                    // no error message detected from the time based attack.. continue looking for
                    // time based injection point.

                    // cap the time we will delay by to 10 seconds
                    if (modifiedTimeUsed > 10000) timeExceeded = true;

                    boolean parseTimeEquivalent = false;
                    if (modifiedTimeUsed > previousDelay) {
                        if (this.debugEnabled)
                            log.debug(
                                    "The response time "
                                            + modifiedTimeUsed
                                            + " is > the previous response time "
                                            + previousDelay);
                        // in order to rule out false positives due to the increasing SQL parse time
                        // for longer parameter values
                        // we send a random (alphanumeric only) string value of the same length as
                        // the attack parameter
                        // we expect the response time for the SQLi attack to be greater than or
                        // equal to the response time for
                        // the random alphanumeric string parameter
                        // if this is not the case, then we assume that the attack parameter is not
                        // a potential SQL injection causing payload.
                        HttpMessage msgParseDelay = getNewMsg();
                        String parseDelayCheckParameter =
                                RandomStringUtils.random(
                                        newTimeBasedInjectionValue.length(),
                                        RANDOM_PARAMETER_CHARS);
                        setParameter(msgParseDelay, paramName, parseDelayCheckParameter);
                        long parseDelayTimeStarted = System.currentTimeMillis();
                        sendAndReceive(msgParseDelay);
                        countTimeBasedRequests++;
                        long parseDelayTimeUsed =
                                System.currentTimeMillis() - parseDelayTimeStarted;

                        // figure out if the attack delay and the (non-sql-injection) parse delay
                        // are within X ms of each other..
                        parseTimeEquivalent =
                                (Math.abs(modifiedTimeUsed - parseDelayTimeUsed)
                                        < this.parseDelayDifference);
                        if (this.debugEnabled)
                            log.debug(
                                    "The parse time a random parameter of the same length is "
                                            + parseDelayTimeUsed
                                            + ", so the attack and random parameter are "
                                            + (parseTimeEquivalent ? "" : "NOT ")
                                            + "equivalent (given the user defined attack threshold)");
                    }

                    if (modifiedTimeUsed > previousDelay && !parseTimeEquivalent) {
                        maxDelayParameter = newTimeBasedInjectionValue;
                        maxDelay = modifiedTimeUsed;
                        // potential for SQL injection, detectable with "numBlobsToCreate" random
                        // blobs being created..
                        numberOfSequentialIncreases++;
                        if (!potentialTimeBasedSQLInjection) {
                            if (log.isDebugEnabled())
                                log.debug(
                                        "Setting the Detectable Delay parameter to '"
                                                + newTimeBasedInjectionValue
                                                + "'");
                            detectableDelayParameter = newTimeBasedInjectionValue;
                            detectableDelay = modifiedTimeUsed;
                            detectableDelayMessage = msgDelay;
                        }
                        potentialTimeBasedSQLInjection = true;
                    } else {
                        // either no SQL injection, invalid SQL syntax, or timing difference is not
                        // detectable with "numBlobsToCreate" random blobs being created.
                        // keep trying with larger numbers of "numBlobsToCreate", since that's the
                        // thing we can most easily control and verify
                        // note also: if for some reason, an earlier attack with a smaller number of
                        // blobs indicated there might be a vulnerability
                        // then this case will rule that out if it was a fluke...
                        // the timing delay must keep increasing, as the number of blobs is
                        // increased.
                        potentialTimeBasedSQLInjection = false;
                        numberOfSequentialIncreases = 0;
                        detectableDelayParameter = null;
                        detectableDelay = 0;
                        detectableDelayMessage = null;
                        maxDelayParameter = null;
                        maxDelay = 0;
                        // do not break at this point, since we may simply need to keep increasing
                        // numBlobsToCreate to
                        // a point where we can detect the resulting delay
                    }
                    if (this.debugEnabled)
                        log.debug(
                                "Time Based SQL Injection test for "
                                        + numBlobsToCreate
                                        + " random blob bytes: ["
                                        + newTimeBasedInjectionValue
                                        + "] on field: ["
                                        + paramName
                                        + "] with value ["
                                        + newTimeBasedInjectionValue
                                        + "] took "
                                        + modifiedTimeUsed
                                        + "ms, where the original took "
                                        + originalTimeUsed
                                        + "ms");
                    previousDelay = modifiedTimeUsed;

                    // bale out if we were asked nicely
                    if (isStop()) {
                        if (this.debugEnabled) log.debug("Stopping the scan due to a user request");
                        return;
                    }
                } // end of for loop to increase the number of random blob bytes to create

                // the number of times that we could sequentially increase the delay by increasing
                // the "number of random blob bytes to create"
                // is the basis for the threshold of the alert. In some cases, the user may want to
                // see a solid increase in delay
                // for say 4 or 5 iterations, in order to be confident the vulnerability exists. In
                // other cases, the user may be happy with just 2 sequential increases...
                if (this.debugEnabled)
                    log.debug("Number of sequential increases: " + numberOfSequentialIncreases);
                if (numberOfSequentialIncreases >= this.incrementalDelayIncreasesForAlert) {
                    // Likely a SQL Injection. Raise it
                    String extraInfo =
                            Constant.messages.getString(
                                    "ascanbeta.sqlinjection.sqlite.alert.timebased.extrainfo",
                                    detectableDelayParameter,
                                    detectableDelay,
                                    maxDelayParameter,
                                    maxDelay,
                                    originalParamValue,
                                    originalTimeUsed);

                    // raise the alert
                    bingo(
                            Alert.RISK_HIGH,
                            Alert.CONFIDENCE_MEDIUM,
                            getName(),
                            getDescription(),
                            getBaseMsg().getRequestHeader().getURI().getURI(), // url
                            paramName,
                            detectableDelayParameter,
                            extraInfo,
                            getSolution(),
                            extraInfo /*as evidence*/,
                            this.getCweId(),
                            this.getWascId(),
                            detectableDelayMessage);
                    if (this.debugEnabled)
                        log.debug(
                                "A likely Time Based SQL Injection Vulnerability has been found with ["
                                        + detectableDelayMessage.getRequestHeader().getMethod()
                                        + "] URL ["
                                        + detectableDelayMessage
                                                .getRequestHeader()
                                                .getURI()
                                                .getURI()
                                        + "] on field: ["
                                        + paramName
                                        + "]");
                    // outta the time based loop..
                    foundTimeBased = true;
                    break;
                } // the user-define threshold has been exceeded. raise it.

                // outta the time based loop..
                if (foundTimeBased) break;

                // bale out if we were asked nicely
                if (isStop()) {
                    if (this.debugEnabled) log.debug("Stopping the scan due to a user request");
                    return;
                }
            } // for each time based SQL index
            // end of check for SQLite time based SQL Injection

            // TODO: fix this logic, cos it's broken already. it reports version 2.2 and 4.0..
            // (false positives ahoy)
            // NOTE(review): this unconditionally disables the UNION technique below, making the
            // whole "if (doUnionBased)" branch dead code until the TODO above is resolved.
            doUnionBased = false;

            // try to get the version of SQLite, using a UNION based SQL injection vulnerability
            // do this regardless of whether we already found a vulnerability using another
            // technique.
            if (doUnionBased) {
                int unionRequests = 0;
                // catch 3.0, 3.0.1, 3.0.1.1, 3.7.16.2, etc
                Pattern versionNumberPattern =
                        Pattern.compile(
                                "[0-9]{1}\\.[0-9]{1,2}\\.[0-9]{1,2}\\.[0-9]{1,2}|[0-9]{1}\\.[0-9]{1,2}\\.[0-9]{1,2}|[0-9]{1}\\.[0-9]{1,2}",
                                PATTERN_PARAM);
                String candidateValues[] = {"", originalParamValue};
                // shonky break label. labels the loop to break out of. I believe I just finished a
                // sentence with a preposition too. Oh My.
                unionLoops:
                for (String sqliteVersionFunction : SQLITE_VERSION_FUNCTIONS) {
                    for (String statementTypeCloser : SYNTACTIC_PREVIOUS_STATEMENT_TYPE_CLOSERS) {
                        for (String statementClauseCloser :
                                SYNTACTIC_PREVIOUS_STATEMENT_CLAUSE_CLOSERS) {
                            for (String unionAdditionalColms : UNION_ADDITIONAL_COLUMNS) {
                                for (String nextStatementCommenter :
                                        SYNTACTIC_NEXT_STATEMENT_COMMENTER) {
                                    for (String statementUnionStatement :
                                            SYNTACTIC_UNION_STATEMENTS) {
                                        for (String value : candidateValues) {
                                            // are we out of lives yet?
                                            // TODO: fix so that the logic does not spin through the
                                            // loop headers to get out of all of the nested loops..
                                            // without using the shonky break to label logic
                                            if (unionRequests > doUnionMaxRequests) {
                                                break unionLoops;
                                            }
                                            ;
                                            // substitute each placeholder of the template in turn
                                            String unionAttack = UNION_ATTACK_TEMPLATE;
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<SQLITE_VERSION_FUNCTION>>>>",
                                                            sqliteVersionFunction);
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<SYNTACTIC_PREVIOUS_STATEMENT_TYPE_CLOSER>>>>",
                                                            statementTypeCloser);
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<SYNTACTIC_PREVIOUS_STATEMENT_CLAUSE_CLOSER>>>>",
                                                            statementClauseCloser);
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<UNIONADDITIONALCOLUMNS>>>>",
                                                            unionAdditionalColms);
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<SYNTACTIC_NEXT_STATEMENT_COMMENTER>>>>",
                                                            nextStatementCommenter);
                                            unionAttack =
                                                    unionAttack.replace(
                                                            "<<<<UNIONSTATEMENT>>>>",
                                                            statementUnionStatement);
                                            unionAttack =
                                                    unionAttack.replace("<<<<VALUE>>>>", value);

                                            if (log.isDebugEnabled())
                                                log.debug(
                                                        "About to try to determine the SQLite version with ["
                                                                + unionAttack
                                                                + "]");
                                            HttpMessage unionAttackMessage = getNewMsg();
                                            setParameter(
                                                    unionAttackMessage, paramName, unionAttack);
                                            sendAndReceive(unionAttackMessage);
                                            unionRequests++;

                                            // check the response for the version information..
                                            Matcher matcher =
                                                    versionNumberPattern.matcher(
                                                            unionAttackMessage
                                                                    .getResponseBody()
                                                                    .toString());
                                            while (matcher.find()) {
                                                String versionNumber = matcher.group();
                                                Pattern actualVersionNumberPattern =
                                                        Pattern.compile(
                                                                "\\Q" + versionNumber + "\\E",
                                                                PATTERN_PARAM);
                                                if (log.isDebugEnabled())
                                                    log.debug(
                                                            "Found a candidate SQLite version number '"
                                                                    + versionNumber
                                                                    + "'. About to look for the absence of '"
                                                                    + actualVersionNumberPattern
                                                                    + "' in the (re-created) original response body (of length "
                                                                    + originalMessage
                                                                            .getResponseBody()
                                                                            .toString()
                                                                            .length()
                                                                    + ") to validate it");

                                                // if the version number was not in the original*
                                                // response, we will call it..
                                                Matcher matcherVersionInOriginal =
                                                        actualVersionNumberPattern.matcher(
                                                                originalMessage
                                                                        .getResponseBody()
                                                                        .toString());
                                                if (!matcherVersionInOriginal.find()) {
                                                    // we have the SQLite version number..
                                                    if (log.isDebugEnabled())
                                                        log.debug(
                                                                "We found SQLite version ["
                                                                        + versionNumber
                                                                        + "]");
                                                    String extraInfo =
                                                            Constant.messages.getString(
                                                                    "ascanbeta.sqlinjection.sqlite.alert.versionnumber.extrainfo",
                                                                    versionNumber);
                                                    // raise the alert
                                                    bingo(
                                                            Alert.RISK_HIGH,
                                                            Alert.CONFIDENCE_MEDIUM,
                                                            getName() + " - " + versionNumber,
                                                            getDescription(),
                                                            getBaseMsg()
                                                                    .getRequestHeader()
                                                                    .getURI()
                                                                    .getURI(), // url
                                                            paramName,
                                                            unionAttack,
                                                            extraInfo,
                                                            getSolution(),
                                                            versionNumber /*as evidence*/,
                                                            this.getCweId(),
                                                            this.getWascId(),
                                                            unionAttackMessage);
                                                    break unionLoops;
                                                }
                                            }

                                            // bale out if we were asked nicely
                                            if (isStop()) {
                                                if (this.debugEnabled)
                                                    log.debug(
                                                            "Stopping the scan due to a user request");
                                                return;
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // end of doUnionBased

        } catch (InvalidRedirectLocationException | UnknownHostException | URIException e) {
            if (log.isDebugEnabled()) {
                log.debug("Failed to send HTTP message, cause: " + e.getMessage());
            }
        } catch (Exception e) {
            // Do not try to internationalise this.. we need an error message in any event..
            // if it's in English, it's still better than not having it at all.
            log.error(
                    "An error occurred checking a url for SQLite SQL Injection vulnerabilities",
                    e);
        }
    }

    /** @return the risk level reported for alerts raised by this rule (always High) */
    @Override
    public int getRisk() {
        return Alert.RISK_HIGH;
    }

    /** @return 89, i.e. CWE-89: SQL Injection */
    @Override
    public int getCweId() {
        return 89;
    }

    /** @return 19, i.e. WASC-19: SQL Injection */
    @Override
    public int getWascId() {
        return 19;
    }
}
apache-2.0
charithag/iot-server-samples
AndroidTVGateway/agent/app/src/main/java/org/wso2/androidtv/agent/util/dto/AndroidConfiguration.java
1302
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.androidtv.agent.util.dto; /** * This holds the required cdmf.unit.device.type.android_tv.platform.configuration for agent to connect to the server. */ public class AndroidConfiguration { private String tenantDomain; private String mqttEndpoint; public String getTenantDomain() { return tenantDomain; } public void setTenantDomain(String tenantDomain) { this.tenantDomain = tenantDomain; } public String getMqttEndpoint() { return mqttEndpoint; } public void setMqttEndpoint(String mqttEndpoint) { this.mqttEndpoint = mqttEndpoint; } }
apache-2.0
zhuyulinliu/coolweather
app/src/main/java/coolweather/com/coolweather/ChooseAreaFragment.java
9379
package coolweather.com.coolweather;

import android.app.ProgressDialog;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import org.litepal.crud.DataSupport;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import coolweather.com.coolweather.db.City;
import coolweather.com.coolweather.db.County;
import coolweather.com.coolweather.db.Province;
import coolweather.com.coolweather.util.HttpUtil;
import coolweather.com.coolweather.util.Utility;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Response;

/**
 * Fragment that lets the user drill down Province -> City -> County to pick an area whose
 * weather should be shown. Each level is loaded from the local LitePal database first and,
 * on a miss, fetched from the guolin.tech area service and cached.
 */
public class ChooseAreaFragment extends Fragment {

    // Navigation levels for the drill-down list.
    public static final int LEVEL_PROVINCE = 0;
    public static final int LEVEL_CITY = 1;
    public static final int LEVEL_COUNTY = 2;

    private ProgressDialog progressDialog;
    private TextView titleText;
    private Button backButton;
    private ListView listView;
    private ArrayAdapter<String> adapter;
    // Backing data for the list adapter: display names of the current level's entries.
    private List<String> dataList = new ArrayList<>();

    /** List of provinces. */
    private List<Province> provinceList;

    /** List of cities. */
    private List<City> cityList;

    /** List of counties. */
    private List<County> countyList;

    /** The currently selected province. */
    private Province selectedProvince;

    /** The currently selected city. */
    private City selectedCity;

    /** The level (LEVEL_PROVINCE/LEVEL_CITY/LEVEL_COUNTY) currently shown in the list. */
    private int currentLevel;

    /** Inflates the layout and caches the view references; data loading happens later. */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.choose_area, container, false);
        titleText = (TextView) view.findViewById(R.id.title_text);
        backButton = (Button) view.findViewById(R.id.back_button);
        listView = (ListView) view.findViewById(R.id.list_view);
        adapter = new ArrayAdapter<String>(getContext(), android.R.layout.simple_list_item_1, dataList);
        listView.setAdapter(adapter);
        return view;
    }

    /** Wires up the item/back listeners and kicks off the initial province listing. */
    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
                if (currentLevel == LEVEL_PROVINCE){
                    // Drill down into the tapped province.
                    selectedProvince = provinceList.get(i);
                    queryCities();
                }else if (currentLevel == LEVEL_CITY){
                    selectedCity = cityList.get(i);
                    queryCounties();
                }else if (currentLevel == LEVEL_COUNTY){
                    // Leaf level: hand the weather id to whichever activity hosts us.
                    String weatherId = countyList.get(i).getWeatherId();
                    if (getActivity() instanceof MainActivity){
                        // From the main screen: open WeatherActivity and finish ourselves.
                        Intent intent = new Intent(getActivity(),WeatherActivity.class);
                        intent.putExtra("weather_id",weatherId);
                        startActivity(intent);
                        getActivity().finish();
                    }else if (getActivity() instanceof WeatherActivity){
                        // Already inside WeatherActivity (drawer): just refresh in place.
                        WeatherActivity activity = (WeatherActivity) getActivity();
                        activity.mWeatherId = weatherId;
                        activity.drawerLayout.closeDrawers();
                        activity.swipeRefresh.setRefreshing(true);
                        activity.requestWeather(weatherId);
                    }
                }
            }
        });
        backButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                // Back navigates one level up; at province level the button is hidden.
                if (currentLevel == LEVEL_CITY){
                    queryProvinces();
                }else if (currentLevel == LEVEL_COUNTY){
                    queryCities();
                }
            }
        });
        queryProvinces();
    }

    /**
     * Queries all provinces nationwide: tries the local database first; if nothing is cached,
     * falls back to fetching from the server.
     */
    private void queryProvinces() {
        titleText.setText("中国");
        backButton.setVisibility(View.GONE);
        provinceList = DataSupport.findAll(Province.class);
        if (provinceList.size() > 0){
            dataList.clear();
            for (Province province : provinceList){
                dataList.add(province.getProvinceName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_PROVINCE;
        }else{
            String address = "http://guolin.tech/api/china";
            queryFromServer(address, "province");
        }
    }

    /**
     * Queries all cities in the selected province: local database first, then the server on a
     * cache miss.
     */
    private void queryCities() {
        titleText.setText(selectedProvince.getProvinceName());
        backButton.setVisibility(View.VISIBLE);
        cityList = DataSupport.where("provinceId = ?", String.valueOf(selectedProvince.getId())).find(City.class);
        if (cityList.size() > 0){
            dataList.clear();
            for (City city : cityList){
                dataList.add(city.getCityName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_CITY;
        }else{
            String address = "http://guolin.tech/api/china/"+selectedProvince.getProvinceCode();
            queryFromServer(address, "city");
        }
    }

    /**
     * Queries all counties in the selected city: local database first, then the server on a
     * cache miss.
     */
    private void queryCounties() {
        titleText.setText(selectedCity.getCityName());
        backButton.setVisibility(View.VISIBLE);
        countyList = DataSupport.where("cityId = ?", String.valueOf(selectedCity.getId())).find(County.class);
        if (countyList.size() > 0){
            dataList.clear();
            for (County county : countyList){
                dataList.add(county.getCountyName());
            }
            adapter.notifyDataSetChanged();
            listView.setSelection(0);
            currentLevel = LEVEL_COUNTY;
        }else{
            String address = "http://guolin.tech/api/china/"+selectedProvince.getProvinceCode()+"/"+selectedCity.getCityCode();
            queryFromServer(address, "county");
        }
    }

    /**
     * Fetches province/city/county data from the server, persists it via Utility, and re-runs
     * the matching query on the UI thread so the freshly cached rows are displayed.
     *
     * NOTE(review): if parsing fails (result == false) the progress dialog is never dismissed
     * and no error is surfaced; also the getActivity() calls in the callbacks may NPE if the
     * fragment has been detached before the response arrives — confirm and harden.
     *
     * @param address the URL to request
     * @param type one of "province", "city" or "county" — selects the response handler
     */
    private void queryFromServer(String address, final String type) {
        showProgressDialog();
        HttpUtil.sendOkHttpRequest(address, new Callback() {
            @Override
            public void onFailure(Call call, IOException e) {
                // Network failure: report it on the UI thread.
                getActivity().runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        closeProgressDialog();
                        Toast.makeText(getContext(), "加载失败", Toast.LENGTH_SHORT).show();
                    }
                });
            }

            @Override
            public void onResponse(Call call, Response response) throws IOException {
                String responseText = response.body().string();
                boolean result = false;
                // Parse and persist according to the requested level.
                if ("province".equals(type)){
                    result = Utility.handleProvinceResponse(responseText);
                }else if ("city".equals(type)){
                    result = Utility.handleCityResponse(responseText, selectedProvince.getId());
                }else if ("county".equals(type)){
                    result = Utility.handleCountyResponse(responseText, selectedCity.getId());
                }
                if (result){
                    // Data is now in the database: re-query on the UI thread to display it.
                    getActivity().runOnUiThread(new Runnable() {
                        @Override
                        public void run() {
                            closeProgressDialog();
                            if ("province".equals(type)){
                                queryProvinces();
                            }else if ("city".equals(type)){
                                queryCities();
                            }else if ("county".equals(type)){
                                queryCounties();
                            }
                        }
                    });
                }
            }
        });
    }

    /** Dismisses the progress dialog if it is showing. */
    private void closeProgressDialog() {
        if (progressDialog != null){
            progressDialog.dismiss();
        }
    }

    /** Lazily creates and shows the (non-cancellable) loading progress dialog. */
    private void showProgressDialog() {
        if (progressDialog == null){
            progressDialog = new ProgressDialog(getActivity());
            progressDialog.setMessage("正在加载...");
            progressDialog.setCanceledOnTouchOutside(false);
        }
        progressDialog.show();
    }
}
apache-2.0
tusharm/WebStub
src/main/java/com/thoughtworks/webstub/server/JettyHttpServer.java
1904
package com.thoughtworks.webstub.server; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.handler.ContextHandler; import org.eclipse.jetty.server.handler.ContextHandlerCollection; public class JettyHttpServer implements HttpServer { private Server server; private ContextHandlerCollection handlerCollection; public JettyHttpServer(int port) { server = new Server(port); handlerCollection = new ContextHandlerCollection(); server.setHandler(handlerCollection); } public JettyHttpServer() { // random port this(0); } @Override public void addContext(ContextHandler contextHandler) { handlerCollection.addHandler(contextHandler); start(contextHandler); } @Override public int port() { Connector[] connectors = server.getConnectors(); for (Connector connector : connectors) { if (connector instanceof ServerConnector) return ((ServerConnector) connector).getLocalPort(); } throw new IllegalStateException("Couldn't find a server connector; this is absurd!"); } @Override public void start() { try { server.start(); } catch (Exception e) { throw new RuntimeException("Unable to start server", e); } } @Override public void stop() { try { server.stop(); } catch (Exception e) { throw new RuntimeException("Unable to stop server", e); } } private void start(ContextHandler contextHandler) { try { contextHandler.start(); } catch (Exception e) { throw new RuntimeException("Error starting context: " + contextHandler.getDisplayName(), e); } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/ModifyNetworkInterfaceAttributeRequestMarshaller.java
4095
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.ec2.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.Request; import com.amazonaws.DefaultRequest; import com.amazonaws.http.HttpMethodName; import com.amazonaws.services.ec2.model.*; import com.amazonaws.transform.Marshaller; import com.amazonaws.util.StringUtils; /** * ModifyNetworkInterfaceAttributeRequest Marshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ModifyNetworkInterfaceAttributeRequestMarshaller implements Marshaller<Request<ModifyNetworkInterfaceAttributeRequest>, ModifyNetworkInterfaceAttributeRequest> { public Request<ModifyNetworkInterfaceAttributeRequest> marshall(ModifyNetworkInterfaceAttributeRequest modifyNetworkInterfaceAttributeRequest) { if (modifyNetworkInterfaceAttributeRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } Request<ModifyNetworkInterfaceAttributeRequest> request = new DefaultRequest<ModifyNetworkInterfaceAttributeRequest>( modifyNetworkInterfaceAttributeRequest, "AmazonEC2"); request.addParameter("Action", "ModifyNetworkInterfaceAttribute"); request.addParameter("Version", "2016-11-15"); request.setHttpMethod(HttpMethodName.POST); NetworkInterfaceAttachmentChanges attachment = modifyNetworkInterfaceAttributeRequest.getAttachment(); if (attachment != null) { if (attachment.getAttachmentId() != null) { 
request.addParameter("Attachment.AttachmentId", StringUtils.fromString(attachment.getAttachmentId())); } if (attachment.getDeleteOnTermination() != null) { request.addParameter("Attachment.DeleteOnTermination", StringUtils.fromBoolean(attachment.getDeleteOnTermination())); } } if (modifyNetworkInterfaceAttributeRequest.getDescription() != null) { request.addParameter("Description.Value", StringUtils.fromString(modifyNetworkInterfaceAttributeRequest.getDescription())); } com.amazonaws.internal.SdkInternalList<String> modifyNetworkInterfaceAttributeRequestGroupsList = (com.amazonaws.internal.SdkInternalList<String>) modifyNetworkInterfaceAttributeRequest .getGroups(); if (!modifyNetworkInterfaceAttributeRequestGroupsList.isEmpty() || !modifyNetworkInterfaceAttributeRequestGroupsList.isAutoConstruct()) { int groupsListIndex = 1; for (String modifyNetworkInterfaceAttributeRequestGroupsListValue : modifyNetworkInterfaceAttributeRequestGroupsList) { if (modifyNetworkInterfaceAttributeRequestGroupsListValue != null) { request.addParameter("SecurityGroupId." + groupsListIndex, StringUtils.fromString(modifyNetworkInterfaceAttributeRequestGroupsListValue)); } groupsListIndex++; } } if (modifyNetworkInterfaceAttributeRequest.getNetworkInterfaceId() != null) { request.addParameter("NetworkInterfaceId", StringUtils.fromString(modifyNetworkInterfaceAttributeRequest.getNetworkInterfaceId())); } if (modifyNetworkInterfaceAttributeRequest.getSourceDestCheck() != null) { request.addParameter("SourceDestCheck.Value", StringUtils.fromBoolean(modifyNetworkInterfaceAttributeRequest.getSourceDestCheck())); } return request; } }
apache-2.0
SableCC/sablecc
src/org/sablecc/objectmacro/codegeneration/java/UtilsGenerationWalker.java
8226
/* This file is part of SableCC ( http://sablecc.org ).
 *
 * See the NOTICE file distributed with this work for copyright information.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.sablecc.objectmacro.codegeneration.java;

import java.io.*;

import org.sablecc.objectmacro.codegeneration.*;
import org.sablecc.objectmacro.codegeneration.java.macro.*;
import org.sablecc.objectmacro.intermediate.syntax3.analysis.*;
import org.sablecc.objectmacro.intermediate.syntax3.node.*;

/**
 * Tree walker that emits the fixed set of runtime-support Java files
 * (Context, exception classes, value/directive classes, cache builder,
 * cycle detector) into the generated package directory.
 */
public class UtilsGenerationWalker
        extends DepthFirstAdapter {

    // Intermediate representation being compiled; only its destination package is read here.
    private final IntermediateRepresentation ir;

    // Directory the support files are written into.
    private File packageDirectory;

    // Factory producing the macro objects that render each support file.
    private final Macros factory;

    public UtilsGenerationWalker(
            IntermediateRepresentation ir,
            File packageDirectory,
            Macros factory) {

        this.ir = ir;
        this.packageDirectory = packageDirectory;
        this.factory = factory;
    }

    /**
     * Builds one macro per support file, stamps each with the destination
     * package (when one is configured), then writes every rendered file.
     * Note: macro variable ↔ output filename pairings below are fixed — keep
     * them in sync when adding a new support file.
     */
    @Override
    public void caseAIntermediateRepresentation(
            AIntermediateRepresentation node) {

        // One macro instance per generated support file.
        MContext mContext = this.factory.newContext();
        MExParameterNull mParameterNull = this.factory.newExParameterNull();
        MExIncorrectType mIncorrectType = this.factory.newExIncorrectType();
        MExObjectMacroErrorHead mObjectMacroErrorHead
                = this.factory.newExObjectMacroErrorHead();
        MExMacroNullInList mMacroNullInList
                = this.factory.newExMacroNullInList();
        MExCyclicReference mCyclicReference
                = this.factory.newExCyclicReference();
        MExCannotModify mCannotModify = this.factory.newExCannotModify();
        MExObjectMacroException mObjectMacroException =
                this.factory.newExObjectMacroException();
        MExVersionNull mExVersionNull = this.factory.newExVersionNull();
        MExVersionsDifferent mExVersionsDifferent
                = this.factory.newExVersionsDifferent();
        MExInternalException mExInternalException
                = this.factory.newExInternalException();
        MMacroInternalException mMacroInternalException
                = this.factory.newMacroInternalException();
        MClassValue mClassValue = this.factory.newClassValue();
        MClassMacroValue mClassMacroValue = this.factory.newClassMacroValue();
        MClassStringValue mClassStringValue
                = this.factory.newClassStringValue();
        MClassCacheBuilder mClassCacheBuilder
                = this.factory.newClassCacheBuilder();
        // Cycle detection uses Tarjan's algorithm in the generated runtime,
        // hence the local name — TODO confirm against MCycleDetectorClass template.
        MCycleDetectorClass mTarjanClass = this.factory.newCycleDetectorClass();
        MSuperDirective mSuperDirective = this.factory.newSuperDirective();
        MClassAfterLast mClassAfterLast = this.factory.newClassAfterLast();
        MClassBeforeFirst mClassBeforeFirst
                = this.factory.newClassBeforeFirst();
        MClassNone mClassNone = this.factory.newClassNone();
        MClassSeparator mClassSeparator = this.factory.newClassSeparator();

        // An empty destination package means "default package": no package
        // declaration is added to any generated file.
        if (!this.ir.getDestinationPackage().equals("")) {
            String destinationPackage = this.ir.getDestinationPackage();
            MPackageDeclaration mPackageDeclaration
                    = this.factory.newPackageDeclaration();
            mPackageDeclaration.addPackageName(destinationPackage);

            // Every support-file macro gets the same package declaration.
            mContext.addPackageDeclaration(mPackageDeclaration);
            mIncorrectType.addPackageDeclaration(mPackageDeclaration);
            mParameterNull.addPackageDeclaration(mPackageDeclaration);
            mObjectMacroErrorHead.addPackageDeclaration(mPackageDeclaration);
            mMacroNullInList.addPackageDeclaration(mPackageDeclaration);
            mCyclicReference.addPackageDeclaration(mPackageDeclaration);
            mCannotModify.addPackageDeclaration(mPackageDeclaration);
            mObjectMacroException.addPackageDeclaration(mPackageDeclaration);
            mClassValue.addPackageDeclaration(mPackageDeclaration);
            mClassMacroValue.addPackageDeclaration(mPackageDeclaration);
            mClassStringValue.addPackageDeclaration(mPackageDeclaration);
            mSuperDirective.addPackageDeclaration(mPackageDeclaration);
            mClassAfterLast.addPackageDeclaration(mPackageDeclaration);
            mClassBeforeFirst.addPackageDeclaration(mPackageDeclaration);
            mClassNone.addPackageDeclaration(mPackageDeclaration);
            mClassSeparator.addPackageDeclaration(mPackageDeclaration);
            mClassCacheBuilder.addPackageDeclaration(mPackageDeclaration);
            mTarjanClass.addPackageDeclaration(mPackageDeclaration);
            mExVersionNull.addPackageDeclaration(mPackageDeclaration);
            mExVersionsDifferent.addPackageDeclaration(mPackageDeclaration);
            mExInternalException.addPackageDeclaration(mPackageDeclaration);
            mMacroInternalException.addPackageDeclaration(mPackageDeclaration);
        }

        // Render and write each support file into the package directory.
        GenerationUtils.writeFile(this.packageDirectory, "Context.java",
                mContext.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorIncorrectType.java", mIncorrectType.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorParameterNull.java", mParameterNull.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MObjectMacroUserErrorHead.java", mObjectMacroErrorHead.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorMacroNullInList.java", mMacroNullInList.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorCyclicReference.java", mCyclicReference.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorCannotModify.java", mCannotModify.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "ObjectMacroException.java", mObjectMacroException.build());
        GenerationUtils.writeFile(this.packageDirectory, "Value.java",
                mClassValue.build());
        GenerationUtils.writeFile(this.packageDirectory, "MacroValue.java",
                mClassMacroValue.build());
        GenerationUtils.writeFile(this.packageDirectory, "StringValue.java",
                mClassStringValue.build());
        GenerationUtils.writeFile(this.packageDirectory, "Directive.java",
                mSuperDirective.build());
        GenerationUtils.writeFile(this.packageDirectory, "DAfterLast.java",
                mClassAfterLast.build());
        GenerationUtils.writeFile(this.packageDirectory, "DBeforeFirst.java",
                mClassBeforeFirst.build());
        GenerationUtils.writeFile(this.packageDirectory, "DNone.java",
                mClassNone.build());
        GenerationUtils.writeFile(this.packageDirectory, "DSeparator.java",
                mClassSeparator.build());
        GenerationUtils.writeFile(this.packageDirectory, "CacheBuilder.java",
                mClassCacheBuilder.build());
        GenerationUtils.writeFile(this.packageDirectory, "CycleDetector.java",
                mTarjanClass.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorVersionNull.java", mExVersionNull.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorVersionsDifferent.java", mExVersionsDifferent.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "InternalException.java", mExInternalException.build());
        GenerationUtils.writeFile(this.packageDirectory,
                "MUserErrorInternalException.java", mMacroInternalException.build());
    }
}
apache-2.0
gigaroby/hops
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/capacity/TestLeafQueue.java
124432
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.ConcurrentModificationException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CyclicBarrier; import io.hops.util.DBUtility; import io.hops.util.RMStorageFactory; import io.hops.util.YarnAPIStorageFactory; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.ContainerExitStatus; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.QueueACL; import org.apache.hadoop.yarn.api.records.QueueUserACLInfo; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.server.resourcemanager.MockRM; import org.apache.hadoop.yarn.server.resourcemanager.RMContext; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer; import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerEventType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ActiveUsersManager; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceLimits; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; import org.apache.hadoop.yarn.util.resource.DefaultResourceCalculator; import org.apache.hadoop.yarn.util.resource.ResourceCalculator; import org.apache.hadoop.yarn.util.resource.Resources; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.mockito.Matchers; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; public class TestLeafQueue { private static final Log LOG = LogFactory.getLog(TestLeafQueue.class); private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null); RMContext rmContext; RMContext spyRMContext; ResourceRequest amResourceRequest; CapacityScheduler cs; CapacitySchedulerConfiguration csConf; CapacitySchedulerContext csContext; CSQueue root; Map<String, CSQueue> queues = new HashMap<String, CSQueue>(); final static int GB = 1024; final static String DEFAULT_RACK = "/default"; private final ResourceCalculator resourceCalculator = new DefaultResourceCalculator(); @Before public void setUp() throws Exception { CapacityScheduler spyCs = new CapacityScheduler(); cs = spy(spyCs); rmContext = TestUtils.getMockRMContext(); spyRMContext = spy(rmContext); ConcurrentMap<ApplicationId, RMApp> spyApps = spy(new ConcurrentHashMap<ApplicationId, RMApp>()); RMApp rmApp = mock(RMApp.class); when(rmApp.getRMAppAttempt((ApplicationAttemptId)Matchers.any())).thenReturn(null); amResourceRequest = mock(ResourceRequest.class); 
when(amResourceRequest.getCapability()).thenReturn( Resources.createResource(0, 0)); when(rmApp.getAMResourceRequest()).thenReturn(amResourceRequest); Mockito.doReturn(rmApp).when(spyApps).get((ApplicationId)Matchers.any()); when(spyRMContext.getRMApps()).thenReturn(spyApps); csConf = new CapacitySchedulerConfiguration(); csConf.setBoolean("yarn.scheduler.capacity.user-metrics.enable", true); final String newRoot = "root" + System.currentTimeMillis(); setupQueueConfiguration(csConf, newRoot); YarnConfiguration conf = new YarnConfiguration(); cs.setConf(conf); csContext = mock(CapacitySchedulerContext.class); when(csContext.getConfiguration()).thenReturn(csConf); when(csContext.getConf()).thenReturn(conf); when(csContext.getMinimumResourceCapability()). thenReturn(Resources.createResource(GB, 1)); when(csContext.getMaximumResourceCapability()). thenReturn(Resources.createResource(16*GB, 32)); when(csContext.getClusterResource()). thenReturn(Resources.createResource(100 * 16 * GB, 100 * 32)); when(csContext.getApplicationComparator()). thenReturn(CapacityScheduler.applicationComparator); when(csContext.getQueueComparator()). thenReturn(CapacityScheduler.queueComparator); when(csContext.getResourceCalculator()). 
thenReturn(resourceCalculator); when(csContext.getRMContext()).thenReturn(rmContext); RMContainerTokenSecretManager containerTokenSecretManager = new RMContainerTokenSecretManager(conf); containerTokenSecretManager.rollMasterKey(); when(csContext.getContainerTokenSecretManager()).thenReturn( containerTokenSecretManager); root = CapacityScheduler.parseQueue(csContext, csConf, null, CapacitySchedulerConfiguration.ROOT, queues, queues, TestUtils.spyHook); cs.setRMContext(spyRMContext); cs.init(csConf); cs.start(); when(spyRMContext.getScheduler()).thenReturn(cs); when(spyRMContext.getYarnConfiguration()) .thenReturn(new YarnConfiguration()); when(cs.getNumClusterNodes()).thenReturn(3); } private static final String A = "a"; private static final String B = "b"; private static final String C = "c"; private static final String C1 = "c1"; private static final String D = "d"; private static final String E = "e"; private void setupQueueConfiguration( CapacitySchedulerConfiguration conf, final String newRoot) { // Define top-level queues conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {newRoot}); conf.setMaximumCapacity(CapacitySchedulerConfiguration.ROOT, 100); conf.setAcl(CapacitySchedulerConfiguration.ROOT, QueueACL.SUBMIT_APPLICATIONS, " "); final String Q_newRoot = CapacitySchedulerConfiguration.ROOT + "." + newRoot; conf.setQueues(Q_newRoot, new String[] {A, B, C, D, E}); conf.setCapacity(Q_newRoot, 100); conf.setMaximumCapacity(Q_newRoot, 100); conf.setAcl(Q_newRoot, QueueACL.SUBMIT_APPLICATIONS, " "); final String Q_A = Q_newRoot + "." + A; conf.setCapacity(Q_A, 8.5f); conf.setMaximumCapacity(Q_A, 20); conf.setAcl(Q_A, QueueACL.SUBMIT_APPLICATIONS, "*"); final String Q_B = Q_newRoot + "." + B; conf.setCapacity(Q_B, 80); conf.setMaximumCapacity(Q_B, 99); conf.setAcl(Q_B, QueueACL.SUBMIT_APPLICATIONS, "*"); final String Q_C = Q_newRoot + "." 
+ C; conf.setCapacity(Q_C, 1.5f); conf.setMaximumCapacity(Q_C, 10); conf.setAcl(Q_C, QueueACL.SUBMIT_APPLICATIONS, " "); conf.setQueues(Q_C, new String[] {C1}); final String Q_C1 = Q_C + "." + C1; conf.setCapacity(Q_C1, 100); final String Q_D = Q_newRoot + "." + D; conf.setCapacity(Q_D, 9); conf.setMaximumCapacity(Q_D, 11); conf.setAcl(Q_D, QueueACL.SUBMIT_APPLICATIONS, "user_d"); final String Q_E = Q_newRoot + "." + E; conf.setCapacity(Q_E, 1); conf.setMaximumCapacity(Q_E, 1); conf.setAcl(Q_E, QueueACL.SUBMIT_APPLICATIONS, "user_e"); } static LeafQueue stubLeafQueue(LeafQueue queue) { // Mock some methods for ease in these unit tests // 1. LeafQueue.createContainer to return dummy containers doAnswer( new Answer<Container>() { @Override public Container answer(InvocationOnMock invocation) throws Throwable { final FiCaSchedulerApp application = (FiCaSchedulerApp)(invocation.getArguments()[0]); final ContainerId containerId = TestUtils.getMockContainerId(application); Container container = TestUtils.getMockContainer( containerId, ((FiCaSchedulerNode)(invocation.getArguments()[1])).getNodeID(), (Resource)(invocation.getArguments()[2]), ((Priority)invocation.getArguments()[3])); return container; } } ). when(queue).createContainer( any(FiCaSchedulerApp.class), any(FiCaSchedulerNode.class), any(Resource.class), any(Priority.class) ); // 2. 
Stub out LeafQueue.parent.completedContainer CSQueue parent = queue.getParent(); doNothing().when(parent).completedContainer( any(Resource.class), any(FiCaSchedulerApp.class), any(FiCaSchedulerNode.class), any(RMContainer.class), any(ContainerStatus.class), any(RMContainerEventType.class), any(CSQueue.class), anyBoolean()); return queue; } @Test public void testInitializeQueue() throws Exception { final float epsilon = 1e-5f; //can add more sturdy test with 3-layer queues //once MAPREDUCE:3410 is resolved LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A)); assertEquals(0.085, a.getCapacity(), epsilon); assertEquals(0.085, a.getAbsoluteCapacity(), epsilon); assertEquals(0.2, a.getMaximumCapacity(), epsilon); assertEquals(0.2, a.getAbsoluteMaximumCapacity(), epsilon); LeafQueue b = stubLeafQueue((LeafQueue)queues.get(B)); assertEquals(0.80, b.getCapacity(), epsilon); assertEquals(0.80, b.getAbsoluteCapacity(), epsilon); assertEquals(0.99, b.getMaximumCapacity(), epsilon); assertEquals(0.99, b.getAbsoluteMaximumCapacity(), epsilon); ParentQueue c = (ParentQueue)queues.get(C); assertEquals(0.015, c.getCapacity(), epsilon); assertEquals(0.015, c.getAbsoluteCapacity(), epsilon); assertEquals(0.1, c.getMaximumCapacity(), epsilon); assertEquals(0.1, c.getAbsoluteMaximumCapacity(), epsilon); //Verify the value for getAMResourceLimit for queues with < .1 maxcap Resource clusterResource = Resource.newInstance(50 * GB, 50); a.updateClusterResource(clusterResource, new ResourceLimits(clusterResource)); assertEquals(Resource.newInstance(1 * GB, 1), a.getAMResourceLimit()); b.updateClusterResource(clusterResource, new ResourceLimits(clusterResource)); assertEquals(Resource.newInstance(5 * GB, 1), b.getAMResourceLimit()); } @Test public void testSingleQueueOneUserMetrics() throws Exception { // Manipulate queue 'a' LeafQueue a = stubLeafQueue((LeafQueue)queues.get(B)); // Users final String user_0 = "user_0"; // Submit applications final ApplicationAttemptId appAttemptId_0 = 
TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
            mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 1;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), numNodes * 16);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests
    Priority priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 3, true,
            priority, recordFactory)));

    // Start testing...

    // Only 1 container
    a.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource));
    // Available MB = the queue's share of the node minus the 1GB allocated.
    assertEquals(
        (int)(node_0.getTotalResource().getMemory() * a.getCapacity()) - (1*GB),
        a.getMetrics().getAvailableMB());
  }

  /**
   * Submits two attempts of the same application for one user to queue D;
   * passes as long as neither submission throws.
   */
  @Test
  public void testUserQueueAcl() throws Exception {

    // Manipulate queue 'a'
    LeafQueue d = stubLeafQueue((LeafQueue) queues.get(D));

    // Users
    final String user_d = "user_d";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 = TestUtils
        .getMockApplicationAttemptId(0, 1);
    FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_d, d,
        null, spyRMContext);
    d.submitApplicationAttempt(app_0, user_d);

    // Attempt the same application again
    final ApplicationAttemptId appAttemptId_1 = TestUtils
        .getMockApplicationAttemptId(0, 2);
    FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_d, d,
        null, spyRMContext);
    d.submitApplicationAttempt(app_1, user_d); // same user
  }

  /**
   * Drives app/attempt lifecycle events through the CapacityScheduler and
   * checks queue metrics: a FAILED first attempt does not count the app as
   * failed, and a FINISHED later attempt plus app removal marks the app
   * completed (including the per-user metrics).
   */
  @Test
  public void testAppAttemptMetrics() throws Exception {

    // Manipulate queue 'a'
    LeafQueue a = stubLeafQueue((LeafQueue) queues.get(B));

    // Users
    final String user_0 = "user_0";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 = TestUtils
        .getMockApplicationAttemptId(0, 1);

    AppAddedSchedulerEvent addAppEvent =
        new AppAddedSchedulerEvent(appAttemptId_0.getApplicationId(),
            a.getQueueName(), user_0);
    cs.handle(addAppEvent);
    AppAttemptAddedSchedulerEvent addAttemptEvent =
        new AppAttemptAddedSchedulerEvent(appAttemptId_0, false);
    cs.handle(addAttemptEvent);

    AppAttemptRemovedSchedulerEvent event = new AppAttemptRemovedSchedulerEvent(
        appAttemptId_0, RMAppAttemptState.FAILED, false);
    cs.handle(event);

    assertEquals(0, a.getMetrics().getAppsPending());
    assertEquals(0, a.getMetrics().getAppsFailed());

    // Attempt the same application again
    final ApplicationAttemptId appAttemptId_1 = TestUtils
        .getMockApplicationAttemptId(0, 2);
    FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a,
        null, spyRMContext);
a.submitApplicationAttempt(app_1, user_0); // same user

    assertEquals(1, a.getMetrics().getAppsSubmitted());
    assertEquals(1, a.getMetrics().getAppsPending());

    event = new AppAttemptRemovedSchedulerEvent(appAttemptId_0,
        RMAppAttemptState.FINISHED, false);
    cs.handle(event);
    AppRemovedSchedulerEvent rEvent = new AppRemovedSchedulerEvent(
        appAttemptId_0.getApplicationId(), RMAppState.FINISHED);
    cs.handle(rEvent);

    assertEquals(1, a.getMetrics().getAppsSubmitted());
    assertEquals(0, a.getMetrics().getAppsPending());
    assertEquals(0, a.getMetrics().getAppsFailed());
    assertEquals(1, a.getMetrics().getAppsCompleted());

    QueueMetrics userMetrics = a.getMetrics().getUserMetrics(user_0);
    assertEquals(1, userMetrics.getAppsSubmitted());
  }

  /**
   * Exercises user-limit, user-limit-factor and max-capacity for a single
   * user on queue A, then releases every container and verifies that used
   * resources and queue metrics drop back as expected.
   */
  @Test
  public void testSingleQueueWithOneUser() throws Exception {

    // Manipulate queue 'a'
    LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
    // unset maxCapacity
    a.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
            mock(ActiveUsersManager.class), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 1;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), numNodes * 16);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests
    Priority priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 3,
            true, priority, recordFactory)));

    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2,
            true, priority, recordFactory)));

    // Start testing...

    // Only 1 container
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(1*GB, a.getUsedResources().getMemory());
    assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(1*GB, a.getMetrics().getAllocatedMB());
    assertEquals(0*GB, a.getMetrics().getAvailableMB());

    // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also
    // you can get one container more than user-limit
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(2*GB, a.getMetrics().getAllocatedMB());

    // Can't allocate 3rd due to user-limit
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(2*GB, a.getMetrics().getAllocatedMB());

    // Bump up user-limit-factor, now allocate should work
    a.setUserLimitFactor(10);
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(3*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(3*GB, a.getMetrics().getAllocatedMB());

    // One more should work, for app_1, due to user-limit-factor
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(4*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(4*GB, a.getMetrics().getAllocatedMB());

    // Test max-capacity
    // Now - no more allocs since we are at max-cap
    a.setMaxCapacity(0.5f);
    a.assignContainers(clusterResource, node_0, new ResourceLimits(
        clusterResource));
    assertEquals(4*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(4*GB, a.getMetrics().getAllocatedMB());

    // Release each container from app_0
    for (RMContainer rmContainer : app_0.getLiveContainers()) {
      a.completedContainer(clusterResource, app_0, node_0, rmContainer,
          ContainerStatus.newInstance(rmContainer.getContainerId(),
              ContainerState.COMPLETE, "",
              ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
          RMContainerEventType.KILL, null, true);
    }
    assertEquals(1*GB, a.getUsedResources().getMemory());
    assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(1*GB, a.getMetrics().getAllocatedMB());

    // Release each container from app_1
    for (RMContainer rmContainer : app_1.getLiveContainers()) {
      a.completedContainer(clusterResource, app_1, node_0, rmContainer,
          ContainerStatus.newInstance(rmContainer.getContainerId(),
              ContainerState.COMPLETE, "",
              ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
          RMContainerEventType.KILL, null, true);
    }
    assertEquals(0*GB, a.getUsedResources().getMemory());
    assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB,
app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, a.getMetrics().getReservedMB());
    assertEquals(0*GB, a.getMetrics().getAllocatedMB());
    // The queue's full share of the node is available again.
    assertEquals((int)(a.getCapacity() * node_0.getTotalResource().getMemory()),
        a.getMetrics().getAvailableMB());
  }

  /**
   * Verifies the 50% user-limit on queue A with two users: user_0 keeps
   * receiving containers while it is the only active user, even though a
   * second user's application has been admitted (but has no requests).
   */
  @Test
  public void testUserLimits() throws Exception {
    // Mock the queue
    LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
    // unset maxCapacity
    a.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";
    final String user_1 = "user_1";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    final ApplicationAttemptId appAttemptId_2 =
        TestUtils.getMockApplicationAttemptId(2, 0);
    FiCaSchedulerApp app_2 =
        new FiCaSchedulerApp(appAttemptId_2, user_1, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_2, user_1);

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8*GB);
    String host_1 = "127.0.0.2";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 2;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), numNodes * 16);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests
    Priority priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1, true,
            priority, recordFactory)));

    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
            priority, recordFactory)));

    /**
     * Start testing...
     */

    // Set user-limit
    a.setUserLimit(50);
    a.setUserLimitFactor(2);

    // Now, only user_0 should be active since he is the only one with
    // outstanding requests
    assertEquals("There should only be 1 active user!",
        1, a.getActiveUsersManager().getNumActiveUsers());

    // This commented code is key to test 'activeUsers'.
    // It should fail the test if uncommented since
    // it would increase 'activeUsers' to 2 and stop user_2
    // Pre MAPREDUCE-3732 this test should fail without this block too
    // app_2.updateResourceRequests(Collections.singletonList(
    //     TestUtils.createResourceRequest(RMNodeImpl.ANY, 1*GB, 1, priority,
    //         recordFactory)));

    // 1 container to user_0
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

    // Again one to user_0 since he hasn't exceeded user limit yet
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(3*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());

    // One more to user_0 since he is the only active user
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(4*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(2*GB, app_1.getCurrentConsumption().getMemory());
  }

  /**
   * Walks four headroom scenarios on queue B (see the numbered test plan
   * below) and checks computeUserLimitAndSetHeadroom for each.
   */
  @Test
  public void testComputeUserLimitAndSetHeadroom(){
    LeafQueue qb = stubLeafQueue((LeafQueue)queues.get(B));
    qb.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";
    final String user_1 = "user_1";

    // create nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0,
DEFAULT_RACK, 0, 8*GB);
    String host_1 = "127.0.0.2";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 2;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), 1);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // our test plan contains four cases
    // 1. single user dominate the queue, we test the headroom
    // 2. two users, but user_0 is assigned 100% of the queue resource,
    //    submit user_1's application, check headroom correctness
    // 3. two users, each is assigned 50% of the queue resource
    //    each user submit one application and check their headrooms
    // 4. similarly to 3. but user_0 has no quota left and there are
    //    free resources left, check headroom

    // test case 1
    qb.setUserLimit(100);
    qb.setUserLimitFactor(1);

    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, qb,
            qb.getActiveUsersManager(), spyRMContext);
    qb.submitApplicationAttempt(app_0, user_0);
    Priority u0Priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 4*GB, 1, true,
            u0Priority, recordFactory)));

    assertEquals("There should only be 1 active user!",
        1, qb.getActiveUsersManager().getNumActiveUsers());
    // get headroom
    qb.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    qb.computeUserLimitAndSetHeadroom(app_0, clusterResource, app_0
        .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(),
        null);

    // maxqueue 16G, userlimit 13G, - 4G used = 9G
    assertEquals(9*GB,app_0.getHeadroom().getMemory());

    // test case 2
    final ApplicationAttemptId appAttemptId_2 =
        TestUtils.getMockApplicationAttemptId(2, 0);
    FiCaSchedulerApp app_2 =
        new FiCaSchedulerApp(appAttemptId_2, user_1, qb,
            qb.getActiveUsersManager(), spyRMContext);
    Priority u1Priority = TestUtils.createMockPriority(2);
    app_2.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 4*GB, 1, true,
            u1Priority, recordFactory)));
    qb.submitApplicationAttempt(app_2, user_1);
    qb.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    qb.computeUserLimitAndSetHeadroom(app_0, clusterResource, app_0
        .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(),
        null);

    assertEquals(8*GB, qb.getUsedResources().getMemory());
    assertEquals(4*GB, app_0.getCurrentConsumption().getMemory());
    // maxqueue 16G, userlimit 13G, - 4G used = 9G BUT
    // maxqueue 16G - used 8G (4 each app/user) = 8G max headroom (new logic)
    assertEquals(8*GB, app_0.getHeadroom().getMemory());
    assertEquals(4*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(8*GB, app_2.getHeadroom().getMemory());

    // test case 3
    qb.finishApplication(app_0.getApplicationId(), user_0);
    qb.finishApplication(app_2.getApplicationId(), user_1);
    qb.releaseResource(clusterResource, app_0, app_0.getResource(u0Priority),
        null);
    qb.releaseResource(clusterResource, app_2, app_2.getResource(u1Priority),
        null);

    qb.setUserLimit(50);
    qb.setUserLimitFactor(1);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, qb,
            qb.getActiveUsersManager(), spyRMContext);
    final ApplicationAttemptId appAttemptId_3 =
        TestUtils.getMockApplicationAttemptId(3, 0);
    FiCaSchedulerApp app_3 =
        new FiCaSchedulerApp(appAttemptId_3, user_1, qb,
            qb.getActiveUsersManager(), spyRMContext);
    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1, true,
            u0Priority, recordFactory)));
    app_3.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1, true,
            u1Priority, recordFactory)));
    qb.submitApplicationAttempt(app_1, user_0);
    qb.submitApplicationAttempt(app_3, user_1);
    qb.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    qb.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    qb.computeUserLimitAndSetHeadroom(app_3, clusterResource, app_3
        .getResourceRequest(u1Priority, ResourceRequest.ANY).getCapability(),
        null);
    assertEquals(4*GB, qb.getUsedResources().getMemory());
    // maxqueue 16G, userlimit 7G, used (by each user) 2G, headroom 5G (both)
    assertEquals(5*GB, app_3.getHeadroom().getMemory());
    assertEquals(5*GB, app_1.getHeadroom().getMemory());

    // test case 4
    final ApplicationAttemptId appAttemptId_4 =
        TestUtils.getMockApplicationAttemptId(4, 0);
    FiCaSchedulerApp app_4 =
        new FiCaSchedulerApp(appAttemptId_4, user_0, qb,
            qb.getActiveUsersManager(), spyRMContext);
    qb.submitApplicationAttempt(app_4, user_0);
    app_4.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 6*GB, 1, true,
            u0Priority, recordFactory)));
    qb.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    qb.computeUserLimitAndSetHeadroom(app_4, clusterResource, app_4
        .getResourceRequest(u0Priority, ResourceRequest.ANY).getCapability(),
        null);
    qb.computeUserLimitAndSetHeadroom(app_3, clusterResource, app_3
        .getResourceRequest(u1Priority, ResourceRequest.ANY).getCapability(),
        null);

    // app3 is user1, active from last test case
    // maxqueue 16G, userlimit 13G, used 2G, would be headroom 10G BUT
    // 10G in use, so max possible headroom is 6G (new logic)
    assertEquals(6*GB, app_3.getHeadroom().getMemory());
    // testcase3 still active - 2+2+6=10
    assertEquals(10*GB, qb.getUsedResources().getMemory());
    // app4 is user 0
    // maxqueue 16G, userlimit 13G, used 8G, headroom 5G
    // (8G used is 6G from this test case - app4, 2 from last test case, app_1)
    assertEquals(5*GB, app_4.getHeadroom().getMemory());
  }

  /**
   * Checks that headroom is shared across all of a user's active apps and
   * is refreshed when one of the user's containers completes.
   */
  @Test
  public void testUserHeadroomMultiApp() throws Exception {
    // Mock the queue
    LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
    // unset maxCapacity
a.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";
    final String user_1 = "user_1";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    final ApplicationAttemptId appAttemptId_2 =
        TestUtils.getMockApplicationAttemptId(2, 0);
    FiCaSchedulerApp app_2 =
        new FiCaSchedulerApp(appAttemptId_2, user_1, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_2, user_1);

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        16*GB);
    String host_1 = "127.0.0.2";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        16*GB);

    final int numNodes = 2;
    Resource clusterResource =
        Resources.createResource(numNodes * (16*GB), 1);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    Priority priority = TestUtils.createMockPriority(1);

    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, true,
            priority, recordFactory)));

    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(1*GB, a.getUsedResources().getMemory());
    assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

    // Now, headroom is the same for all apps for a given user + queue combo
    // and a change to any app's headroom is reflected for all the user's apps
    // once those apps are active/have themselves calculated headroom for
    // allocation at least one time
    assertEquals(2*GB, app_0.getHeadroom().getMemory());
    assertEquals(0*GB, app_1.getHeadroom().getMemory()); // not yet active
    assertEquals(0*GB, app_2.getHeadroom().getMemory()); // not yet active

    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
            priority, recordFactory)));

    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());

    assertEquals(1*GB, app_0.getHeadroom().getMemory());
    assertEquals(1*GB, app_1.getHeadroom().getMemory()); // now active
    assertEquals(0*GB, app_2.getHeadroom().getMemory()); // not yet active

    // Complete container and verify that headroom is updated, for both apps
    // for the user
    RMContainer rmContainer = app_0.getLiveContainers().iterator().next();
    a.completedContainer(clusterResource, app_0, node_0, rmContainer,
        ContainerStatus.newInstance(rmContainer.getContainerId(),
            ContainerState.COMPLETE, "",
            ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
        RMContainerEventType.KILL, null, true);

    assertEquals(2*GB, app_0.getHeadroom().getMemory());
    assertEquals(2*GB, app_1.getHeadroom().getMemory());
  }

  /**
   * Verifies how headroom behaves when user-limit and queue max-capacity
   * interact: once the queue hits max-cap, headroom drops to zero.
   */
  @Test
  public void testHeadroomWithMaxCap() throws Exception {
    // Mock the queue
    LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
    // unset maxCapacity
    a.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";
    final String user_1 = "user_1";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    final ApplicationAttemptId appAttemptId_2 =
        TestUtils.getMockApplicationAttemptId(2, 0);
    FiCaSchedulerApp app_2 =
        new FiCaSchedulerApp(appAttemptId_2, user_1, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_2, user_1);

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8*GB);
    String host_1 = "127.0.0.2";
    FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 2;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), 1);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests
    Priority priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1, true,
            priority, recordFactory)));

    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
            priority, recordFactory)));

    /**
     * Start testing...
     */

    // Set user-limit
    a.setUserLimit(50);
    a.setUserLimitFactor(2);

    // Now, only user_0 should be active since he is the only one with
    // outstanding requests
    assertEquals("There should only be 1 active user!",
        1, a.getActiveUsersManager().getNumActiveUsers());

    // 1 container to user_0
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(2*GB, app_0.getHeadroom().getMemory()); // User limit = 4G, 2 in use
    assertEquals(0*GB, app_1.getHeadroom().getMemory()); // the application is not yet active

    // Again one to user_0 since he hasn't exceeded user limit yet
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(3*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_0.getHeadroom().getMemory()); // 4G - 3G
    assertEquals(1*GB, app_1.getHeadroom().getMemory()); // 4G - 3G

    // Submit requests for app_1 and set max-cap
    a.setMaxCapacity(.1f);
    app_2.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, true,
            priority, recordFactory)));
    assertEquals(2, a.getActiveUsersManager().getNumActiveUsers());

    // No more to user_0 since he is already over user-limit
    // and no more containers to queue since it's already at max-cap
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(3*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_0.getHeadroom().getMemory());
    assertEquals(0*GB,
app_1.getHeadroom().getMemory());

    // Check headroom for app_2
    app_1.updateResourceRequests(Collections.singletonList( // unset
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 0, true,
            priority, recordFactory)));
    assertEquals(1, a.getActiveUsersManager().getNumActiveUsers());
    a.assignContainers(clusterResource, node_1,
        new ResourceLimits(clusterResource));
    assertEquals(0*GB, app_2.getHeadroom().getMemory()); // hit queue max-cap

  }

  /**
   * Exercises user-limit, user-limit-factor and max-capacity with three
   * users competing for queue A, then releases every container and
   * verifies consumption drops back to zero.
   */
  @Test
  public void testSingleQueueWithMultipleUsers() throws Exception {

    // Mock the queue
    LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
    // unset maxCapacity
    a.setMaxCapacity(1.0f);

    // Users
    final String user_0 = "user_0";
    final String user_1 = "user_1";
    final String user_2 = "user_2";

    // Submit applications
    final ApplicationAttemptId appAttemptId_0 =
        TestUtils.getMockApplicationAttemptId(0, 0);
    FiCaSchedulerApp app_0 =
        new FiCaSchedulerApp(appAttemptId_0, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_0, user_0);

    final ApplicationAttemptId appAttemptId_1 =
        TestUtils.getMockApplicationAttemptId(1, 0);
    FiCaSchedulerApp app_1 =
        new FiCaSchedulerApp(appAttemptId_1, user_0, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_1, user_0);  // same user

    final ApplicationAttemptId appAttemptId_2 =
        TestUtils.getMockApplicationAttemptId(2, 0);
    FiCaSchedulerApp app_2 =
        new FiCaSchedulerApp(appAttemptId_2, user_1, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_2, user_1);

    final ApplicationAttemptId appAttemptId_3 =
        TestUtils.getMockApplicationAttemptId(3, 0);
    FiCaSchedulerApp app_3 =
        new FiCaSchedulerApp(appAttemptId_3, user_2, a,
            a.getActiveUsersManager(), spyRMContext);
    a.submitApplicationAttempt(app_3, user_2);

    // Setup some nodes
    String host_0 = "127.0.0.1";
    FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0,
        8*GB);

    final int numNodes = 1;
    Resource clusterResource =
        Resources.createResource(numNodes * (8*GB), numNodes * 16);
    when(csContext.getNumClusterNodes()).thenReturn(numNodes);

    // Setup resource-requests
    Priority priority = TestUtils.createMockPriority(1);
    app_0.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 10, true,
            priority, recordFactory)));

    app_1.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 10, true,
            priority, recordFactory)));

    /**
     * Start testing...
     */

    // Only 1 container
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(1*GB, a.getUsedResources().getMemory());
    assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

    // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also
    // you can get one container more than user-limit
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

    // Can't allocate 3rd due to user-limit
    a.setUserLimit(25);
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

    // Submit resource requests for other apps now to 'activate' them
    app_2.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 3*GB, 1, true,
            priority, recordFactory)));

    app_3.updateResourceRequests(Collections.singletonList(
        TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
            priority, recordFactory)));

    // Now allocations should go to app_2 since
    // user_0 is at limit in spite of high user-limit-factor
    a.setUserLimitFactor(10);
    a.assignContainers(clusterResource, node_0,
new ResourceLimits(clusterResource));
    assertEquals(5*GB, a.getUsedResources().getMemory());
    assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_3.getCurrentConsumption().getMemory());

    // Now allocations should go to app_0 since
    // user_0 is at user-limit not above it
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(6*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_3.getCurrentConsumption().getMemory());

    // Test max-capacity
    // Now - no more allocs since we are at max-cap
    a.setMaxCapacity(0.5f);
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(6*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_3.getCurrentConsumption().getMemory());

    // Revert max-capacity and user-limit-factor
    // Now, allocations should go to app_3 since it's under user-limit
    a.setMaxCapacity(1.0f);
    a.setUserLimitFactor(1);
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(7*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(1*GB, app_3.getCurrentConsumption().getMemory());

    // Now we should assign to app_3 again since user_2 is under user-limit
    a.assignContainers(clusterResource, node_0,
        new ResourceLimits(clusterResource));
    assertEquals(8*GB, a.getUsedResources().getMemory());
    assertEquals(3*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(2*GB, app_3.getCurrentConsumption().getMemory());

    // 8. Release each container from app_0
    for (RMContainer rmContainer : app_0.getLiveContainers()) {
      a.completedContainer(clusterResource, app_0, node_0, rmContainer,
          ContainerStatus.newInstance(rmContainer.getContainerId(),
              ContainerState.COMPLETE, "",
              ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
          RMContainerEventType.KILL, null, true);
    }
    assertEquals(5*GB, a.getUsedResources().getMemory());
    assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(3*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(2*GB, app_3.getCurrentConsumption().getMemory());

    // 9. Release each container from app_2
    for (RMContainer rmContainer : app_2.getLiveContainers()) {
      a.completedContainer(clusterResource, app_2, node_0, rmContainer,
          ContainerStatus.newInstance(rmContainer.getContainerId(),
              ContainerState.COMPLETE, "",
              ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
          RMContainerEventType.KILL, null, true);
    }
    assertEquals(2*GB, a.getUsedResources().getMemory());
    assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
    assertEquals(0*GB, app_2.getCurrentConsumption().getMemory());
    assertEquals(2*GB, app_3.getCurrentConsumption().getMemory());

    // 10.
// Release each container from app_3; the queue must drain to zero.
for (RMContainer rmContainer : app_3.getLiveContainers()) {
  a.completedContainer(clusterResource, app_3, node_0, rmContainer,
      ContainerStatus.newInstance(rmContainer.getContainerId(),
          ContainerState.COMPLETE, "",
          ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
      RMContainerEventType.KILL, null, true);
}
assertEquals(0*GB, a.getUsedResources().getMemory());
assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_2.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_3.getCurrentConsumption().getMemory());
}

/**
 * Verifies container reservation on a single 4GB node: app_0 (user_0)
 * asks for two 1GB containers, app_1 (user_1) for one 4GB container.
 * After app_0's two allocations fill the node past the point where the
 * 4GB request fits, the scheduler reserves 4GB for app_1 (counted in
 * used/reserved metrics but not in node usage); releasing app_0's
 * containers lets the reservation be fulfilled.
 */
@Test
public void testReservation() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  //unset maxCapacity
  a.setMaxCapacity(1.0f);

  // Users
  final String user_0 = "user_0";
  final String user_1 = "user_1";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext);
  a.submitApplicationAttempt(app_0, user_0);

  final ApplicationAttemptId appAttemptId_1 =
      TestUtils.getMockApplicationAttemptId(1, 0);
  FiCaSchedulerApp app_1 =
      new FiCaSchedulerApp(appAttemptId_1, user_1, a,
          mock(ActiveUsersManager.class), spyRMContext);
  a.submitApplicationAttempt(app_1, user_1);

  // Setup some nodes
  String host_0 = "127.0.0.1";
  FiCaSchedulerNode node_0 =
      TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 4*GB);

  final int numNodes = 2;
  Resource clusterResource =
      Resources.createResource(numNodes * (4*GB), numNodes * 16);
  when(csContext.getNumClusterNodes()).thenReturn(numNodes);

  // Setup resource-requests
  Priority priority = TestUtils.createMockPriority(1);
  app_0.updateResourceRequests(Collections.singletonList(
      TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
          priority, recordFactory)));

  app_1.updateResourceRequests(Collections.singletonList(
      TestUtils.createResourceRequest(ResourceRequest.ANY, 4*GB, 1, true,
          priority, recordFactory)));

  // Start testing...

  // Only 1 container
  a.assignContainers(clusterResource, node_0,
      new ResourceLimits(clusterResource));
  assertEquals(1*GB, a.getUsedResources().getMemory());
  assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
  assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
  assertEquals(0*GB, a.getMetrics().getReservedMB());
  assertEquals(1*GB, a.getMetrics().getAllocatedMB());
  assertEquals(0*GB, a.getMetrics().getAvailableMB());

  // Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also
  // you can get one container more than user-limit
  a.assignContainers(clusterResource, node_0,
      new ResourceLimits(clusterResource));
  assertEquals(2*GB, a.getUsedResources().getMemory());
  assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
  assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
  assertEquals(0*GB, a.getMetrics().getReservedMB());
  assertEquals(2*GB, a.getMetrics().getAllocatedMB());

  // Now, reservation should kick in for app_1: used = 2GB allocated
  // + 4GB reserved, while the node itself still only runs 2GB.
  a.assignContainers(clusterResource, node_0,
      new ResourceLimits(clusterResource));
  assertEquals(6*GB, a.getUsedResources().getMemory());
  assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
  assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
  assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
  assertEquals(2*GB, node_0.getUsedResource().getMemory());
  assertEquals(4*GB, a.getMetrics().getReservedMB());
  assertEquals(2*GB, a.getMetrics().getAllocatedMB());

  // Now free 1 container from app_0 i.e. 1G; the reservation persists.
  RMContainer rmContainer = app_0.getLiveContainers().iterator().next();
  a.completedContainer(clusterResource, app_0, node_0, rmContainer,
      ContainerStatus.newInstance(rmContainer.getContainerId(),
          ContainerState.COMPLETE, "",
          ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
      RMContainerEventType.KILL, null, true);
  a.assignContainers(clusterResource, node_0,
      new ResourceLimits(clusterResource));
  assertEquals(5*GB, a.getUsedResources().getMemory());
  assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
  assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
  assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
  assertEquals(1*GB, node_0.getUsedResource().getMemory());
  assertEquals(4*GB, a.getMetrics().getReservedMB());
  assertEquals(1*GB, a.getMetrics().getAllocatedMB());

  // Now finish another container from app_0 and fulfill the reservation
  rmContainer = app_0.getLiveContainers().iterator().next();
  a.completedContainer(clusterResource, app_0, node_0, rmContainer,
      ContainerStatus.newInstance(rmContainer.getContainerId(),
          ContainerState.COMPLETE, "",
          ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
      RMContainerEventType.KILL, null, true);
  a.assignContainers(clusterResource, node_0,
      new ResourceLimits(clusterResource));
  assertEquals(4*GB, a.getUsedResources().getMemory());
  assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
  assertEquals(4*GB, app_1.getCurrentConsumption().getMemory());
  assertEquals(0*GB, app_1.getCurrentReservation().getMemory());
  assertEquals(4*GB, node_0.getUsedResource().getMemory());
  assertEquals(0*GB, a.getMetrics().getReservedMB());
  assertEquals(4*GB, a.getMetrics().getAllocatedMB());
}

/**
 * Verifies that a reservation held by app_1 on node_0 is dropped
 * ("stolen") when app_1's outstanding request is satisfied on another
 * node (node_1) first, and the freed space on node_0 is then allocated.
 */
@Test
public void testStolenReservedContainer() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  //unset maxCapacity
  a.setMaxCapacity(1.0f);

  // Users
  final String user_0 = "user_0";
  final String user_1 = "user_1";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
    TestUtils.getMockApplicationAttemptId(0, 0);
FiCaSchedulerApp app_0 =
    new FiCaSchedulerApp(appAttemptId_0, user_0, a,
        mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_0, user_0);

final ApplicationAttemptId appAttemptId_1 =
    TestUtils.getMockApplicationAttemptId(1, 0);
FiCaSchedulerApp app_1 =
    new FiCaSchedulerApp(appAttemptId_1, user_1, a,
        mock(ActiveUsersManager.class), spyRMContext);
a.submitApplicationAttempt(app_1, user_1);

// Setup some nodes
String host_0 = "127.0.0.1";
FiCaSchedulerNode node_0 =
    TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 4*GB);
String host_1 = "127.0.0.2";
FiCaSchedulerNode node_1 =
    TestUtils.getMockNode(host_1, DEFAULT_RACK, 0, 4*GB);

final int numNodes = 3;
Resource clusterResource =
    Resources.createResource(numNodes * (4*GB), numNodes * 16);
when(csContext.getNumClusterNodes()).thenReturn(numNodes);

// Setup resource-requests
Priority priority = TestUtils.createMockPriority(1);
app_0.updateResourceRequests(Collections.singletonList(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1, true,
        priority, recordFactory)));

// Setup app_1 to request a 4GB container on host_0 and
// another 4GB container anywhere.
ArrayList<ResourceRequest> appRequests_1 =
    new ArrayList<ResourceRequest>(4);
appRequests_1.add(TestUtils.createResourceRequest(host_0, 4*GB, 1,
    true, priority, recordFactory));
appRequests_1.add(TestUtils.createResourceRequest(DEFAULT_RACK, 4*GB, 1,
    true, priority, recordFactory));
appRequests_1.add(TestUtils.createResourceRequest(ResourceRequest.ANY, 4*GB, 2,
    true, priority, recordFactory));
app_1.updateResourceRequests(appRequests_1);

// Start testing...
// First heartbeat: app_0's single 2GB container lands on node_0.
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(2*GB, a.getUsedResources().getMemory());
assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(0*GB, a.getMetrics().getReservedMB());
assertEquals(2*GB, a.getMetrics().getAllocatedMB());
assertEquals(0*GB, a.getMetrics().getAvailableMB());

// Now, reservation should kick in for app_1 (4GB won't fit on node_0,
// which already runs app_0's 2GB container).
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(6*GB, a.getUsedResources().getMemory());
assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(2*GB, node_0.getUsedResource().getMemory());
assertEquals(4*GB, a.getMetrics().getReservedMB());
assertEquals(2*GB, a.getMetrics().getAllocatedMB());

// node_1 heartbeats in and gets the DEFAULT_RACK request for app_1
// We do not need locality delay here
doReturn(-1).when(a).getNodeLocalityDelay();
a.assignContainers(clusterResource, node_1,
    new ResourceLimits(clusterResource));
assertEquals(10*GB, a.getUsedResources().getMemory());
assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(4*GB, node_1.getUsedResource().getMemory());
assertEquals(4*GB, a.getMetrics().getReservedMB());
assertEquals(6*GB, a.getMetrics().getAllocatedMB());

// Now free 1 container from app_0 and try to assign to node_0
RMContainer rmContainer = app_0.getLiveContainers().iterator().next();
a.completedContainer(clusterResource, app_0, node_0, rmContainer,
    ContainerStatus.newInstance(rmContainer.getContainerId(),
        ContainerState.COMPLETE, "",
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
    RMContainerEventType.KILL, null, true);
// node_0 is now empty: app_1's second 4GB request is allocated and the
// old reservation is released, leaving no reserved memory.
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(8*GB, a.getUsedResources().getMemory());
assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(8*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentReservation().getMemory());
assertEquals(4*GB, node_0.getUsedResource().getMemory());
assertEquals(0*GB, a.getMetrics().getReservedMB());
assertEquals(8*GB, a.getMetrics().getAllocatedMB());
}

/**
 * Verifies re-reservation counting and reservation "exchange": repeated
 * heartbeats on the reserved node bump getReReservations(), a heartbeat
 * on a different node moves the reservation, and completing enough
 * containers surfaces the excess reservation on the CSAssignment.
 */
@Test
public void testReservationExchange() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  //unset maxCapacity
  a.setMaxCapacity(1.0f);
  a.setUserLimitFactor(10);

  // Users
  final String user_0 = "user_0";
  final String user_1 = "user_1";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext);
  a.submitApplicationAttempt(app_0, user_0);

  final ApplicationAttemptId appAttemptId_1 =
      TestUtils.getMockApplicationAttemptId(1, 0);
  FiCaSchedulerApp app_1 =
      new FiCaSchedulerApp(appAttemptId_1, user_1, a,
          mock(ActiveUsersManager.class), spyRMContext);
  a.submitApplicationAttempt(app_1, user_1);

  // Setup some nodes
  String host_0 = "127.0.0.1";
  FiCaSchedulerNode node_0 =
      TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 4*GB);
  String host_1 = "127.0.0.2";
  FiCaSchedulerNode node_1 =
      TestUtils.getMockNode(host_1, DEFAULT_RACK, 0, 4*GB);

  final int numNodes = 3;
  Resource clusterResource =
      Resources.createResource(numNodes * (4*GB), numNodes * 16);
  when(csContext.getNumClusterNodes()).thenReturn(numNodes);
  when(csContext.getMaximumResourceCapability()).thenReturn(
      Resources.createResource(4*GB, 16));
  when(a.getMaximumAllocation()).thenReturn(
      Resources.createResource(4*GB, 16));
  when(a.getMinimumAllocationFactor()).thenReturn(0.25f); // 1G / 4G

  // Setup resource-requests
  Priority priority
      = TestUtils.createMockPriority(1);
app_0.updateResourceRequests(Collections.singletonList(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true,
        priority, recordFactory)));

app_1.updateResourceRequests(Collections.singletonList(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 4*GB, 1, true,
        priority, recordFactory)));

// Start testing...

// Only 1 container
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(1*GB, a.getUsedResources().getMemory());
assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

// Also 2nd -> minCapacity = 1024 since (.1 * 8G) < minAlloc, also
// you can get one container more than user-limit
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(2*GB, a.getUsedResources().getMemory());
assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());

// Now, reservation should kick in for app_1
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(6*GB, a.getUsedResources().getMemory());
assertEquals(2*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(2*GB, node_0.getUsedResource().getMemory());

// Now free 1 container from app_0 i.e. 1G, and re-reserve it:
// the 4GB request still cannot fit, so the re-reservation count ticks.
RMContainer rmContainer = app_0.getLiveContainers().iterator().next();
a.completedContainer(clusterResource, app_0, node_0, rmContainer,
    ContainerStatus.newInstance(rmContainer.getContainerId(),
        ContainerState.COMPLETE, "",
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
    RMContainerEventType.KILL, null, true);
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(5*GB, a.getUsedResources().getMemory());
assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(1*GB, node_0.getUsedResource().getMemory());
assertEquals(1, app_1.getReReservations(priority));

// Re-reserve
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(5*GB, a.getUsedResources().getMemory());
assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(0*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(1*GB, node_0.getUsedResource().getMemory());
assertEquals(2, app_1.getReReservations(priority));

// Try to schedule on node_1 now, should *move* the reservation
a.assignContainers(clusterResource, node_1,
    new ResourceLimits(clusterResource));
assertEquals(9*GB, a.getUsedResources().getMemory());
assertEquals(1*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(4*GB, node_1.getUsedResource().getMemory());
// Doesn't change yet... only when reservation is cancelled or a different
// container is reserved
assertEquals(2, app_1.getReReservations(priority));

// Now finish another container from app_0 and see the reservation cancelled
rmContainer = app_0.getLiveContainers().iterator().next();
a.completedContainer(clusterResource, app_0, node_0, rmContainer,
    ContainerStatus.newInstance(rmContainer.getContainerId(),
        ContainerState.COMPLETE, "",
        ContainerExitStatus.KILLED_BY_RESOURCEMANAGER),
    RMContainerEventType.KILL, null, true);
// The stale 4GB reservation on the now-empty node_0 is reported back as
// an excess reservation on the returned assignment.
CSAssignment assignment = a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
assertEquals(8*GB, a.getUsedResources().getMemory());
assertEquals(0*GB, app_0.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentConsumption().getMemory());
assertEquals(4*GB, app_1.getCurrentReservation().getMemory());
assertEquals(0*GB, node_0.getUsedResource().getMemory());
assertEquals(4*GB,
    assignment.getExcessReservation().getContainer().getResource().getMemory());
}

/**
 * Verifies delay scheduling: off-switch allocation is postponed until
 * the missed scheduling opportunities reach the number of required
 * containers, node-local assignments reset the opportunity count, and
 * rack-local assignment honors the configured node-locality-delay.
 */
@Test
public void testLocalityScheduling() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  // User
  String user_0 = "user_0";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      spy(new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext));
  a.submitApplicationAttempt(app_0, user_0);

  // Setup some nodes and racks
  String host_0 = "127.0.0.1";
  String rack_0 = "rack_0";
  FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, rack_0, 0, 8*GB);

  String host_1 = "127.0.0.2";
  String rack_1 = "rack_1";
  FiCaSchedulerNode node_1 = TestUtils.getMockNode(host_1, rack_1, 0, 8*GB);

  String host_2 = "127.0.0.3";
  String rack_2 = "rack_2";
  FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, rack_2, 0, 8*GB);

  final int numNodes = 3;
  Resource clusterResource =
      Resources.createResource(numNodes * (8*GB), numNodes * 16);
when(csContext.getNumClusterNodes()).thenReturn(numNodes);

// Setup resource-requests and submit
Priority priority = TestUtils.createMockPriority(1);
List<ResourceRequest> app_0_requests_0 = new ArrayList<ResourceRequest>();
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_0, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_0, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_1, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_1, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 3, // one extra
        true, priority, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);

// Start testing...
CSAssignment assignment = null;

// Start with off switch, shouldn't allocate due to delay scheduling
assignment = a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(1, app_0.getSchedulingOpportunities(priority));
assertEquals(3, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType()); // None->NODE_LOCAL

// Another off switch, shouldn't allocate due to delay scheduling
assignment = a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(2, app_0.getSchedulingOpportunities(priority));
assertEquals(3, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType()); // None->NODE_LOCAL

// Another off switch, shouldn't allocate due to delay scheduling
assignment = a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(3, app_0.getSchedulingOpportunities(priority));
assertEquals(3, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType()); // None->NODE_LOCAL

// Another off switch, now we should allocate
// since missedOpportunities=3 and reqdContainers=3
assignment = a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.OFF_SWITCH), eq(node_2),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(4, app_0.getSchedulingOpportunities(priority)); // should NOT reset
assertEquals(2, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.OFF_SWITCH, assignment.getType());

// NODE_LOCAL - node_0
assignment = a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset
assertEquals(1, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType());

// NODE_LOCAL - node_1
assignment = a.assignContainers(clusterResource, node_1,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset
assertEquals(0, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType());

// Add 1 more request to check for RACK_LOCAL
app_0_requests_0.clear();
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_1, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_1, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, // one extra
        true, priority, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);
assertEquals(2, app_0.getTotalRequiredResources(priority));

String host_3 = "127.0.0.4"; // on rack_1
FiCaSchedulerNode node_3 = TestUtils.getMockNode(host_3, rack_1, 0, 8*GB);

// Rack-delay
doReturn(1).when(a).getNodeLocalityDelay();

// Shouldn't assign RACK_LOCAL yet (only 1 missed opportunity so far)
assignment = a.assignContainers(clusterResource, node_3,
    new ResourceLimits(clusterResource));
assertEquals(1, app_0.getSchedulingOpportunities(priority));
assertEquals(2, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.NODE_LOCAL, assignment.getType()); // None->NODE_LOCAL

// Should assign RACK_LOCAL now
assignment = a.assignContainers(clusterResource, node_3,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.RACK_LOCAL), eq(node_3),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset
assertEquals(1, app_0.getTotalRequiredResources(priority));
assertEquals(NodeType.RACK_LOCAL, assignment.getType());
}

/**
 * Verifies that scheduling is strictly priority-ordered within an app:
 * the lower-priority (P2) request is not served until the higher-priority
 * (P1) requests have been satisfied, even when P1 is being delayed for
 * locality.
 */
@Test
public void testApplicationPriorityScheduling() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  // User
  String user_0 = "user_0";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      spy(new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext));
  a.submitApplicationAttempt(app_0, user_0);

  // Setup some nodes and racks
  String host_0 = "127.0.0.1";
  String rack_0 = "rack_0";
  FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, rack_0, 0, 8*GB);

  String host_1 = "127.0.0.2";
  String rack_1 = "rack_1";
  FiCaSchedulerNode node_1 =
    TestUtils.getMockNode(host_1, rack_1, 0, 8*GB);

String host_2 = "127.0.0.3";
String rack_2 = "rack_2";
FiCaSchedulerNode node_2 = TestUtils.getMockNode(host_2, rack_2, 0, 8*GB);

final int numNodes = 3;
Resource clusterResource =
    Resources.createResource(numNodes * (8*GB), 1);
when(csContext.getNumClusterNodes()).thenReturn(numNodes);

// Setup resource-requests and submit
List<ResourceRequest> app_0_requests_0 = new ArrayList<ResourceRequest>();

// P1 - two 1GB containers wanted on host_0/host_1 (relaxed)
Priority priority_1 = TestUtils.createMockPriority(1);
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_0, 1*GB, 1,
        true, priority_1, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_0, 1*GB, 1,
        true, priority_1, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_1, 1*GB, 1,
        true, priority_1, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_1, 1*GB, 1,
        true, priority_1, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2,
        true, priority_1, recordFactory));

// P2 - one 2GB container wanted on host_2
Priority priority_2 = TestUtils.createMockPriority(2);
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_2, 2*GB, 1,
        true, priority_2, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_2, 2*GB, 1,
        true, priority_2, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 2*GB, 1,
        true, priority_2, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);

// Start testing...

// Start with off switch, shouldn't allocate P1 due to delay scheduling
// thus, no P2 either!
a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    eq(priority_1), any(ResourceRequest.class), any(Container.class));
assertEquals(1, app_0.getSchedulingOpportunities(priority_1));
assertEquals(2, app_0.getTotalRequiredResources(priority_1));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    eq(priority_2), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_2));
assertEquals(1, app_0.getTotalRequiredResources(priority_2));

// Another off-switch, shouldn't allocate P1 due to delay scheduling
// thus, no P2 either!
a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    eq(priority_1), any(ResourceRequest.class), any(Container.class));
assertEquals(2, app_0.getSchedulingOpportunities(priority_1));
assertEquals(2, app_0.getTotalRequiredResources(priority_1));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    eq(priority_2), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_2));
assertEquals(1, app_0.getTotalRequiredResources(priority_2));

// Another off-switch: P1's locality delay is now exhausted, so one
// OFF_SWITCH P1 container IS allocated (see the verify below); P2 still
// waits behind the remaining P1 request.
a.assignContainers(clusterResource, node_2,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.OFF_SWITCH), eq(node_2),
    eq(priority_1), any(ResourceRequest.class), any(Container.class));
assertEquals(3, app_0.getSchedulingOpportunities(priority_1));
assertEquals(1, app_0.getTotalRequiredResources(priority_1));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_2),
    eq(priority_2), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_2));
assertEquals(1, app_0.getTotalRequiredResources(priority_2));

// Now, DATA_LOCAL for P1
a.assignContainers(clusterResource, node_0,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0),
    eq(priority_1), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_1));
assertEquals(0, app_0.getTotalRequiredResources(priority_1));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_0),
    eq(priority_2), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_2));
assertEquals(1, app_0.getTotalRequiredResources(priority_2));

// Now, OFF_SWITCH for P2 (P1 is fully satisfied, so P2 finally runs)
a.assignContainers(clusterResource, node_1,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_1),
    eq(priority_1), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority_1));
assertEquals(0, app_0.getTotalRequiredResources(priority_1));
verify(app_0).allocate(eq(NodeType.OFF_SWITCH), eq(node_1),
    eq(priority_2), any(ResourceRequest.class), any(Container.class));
assertEquals(1, app_0.getSchedulingOpportunities(priority_2));
assertEquals(0, app_0.getTotalRequiredResources(priority_2));
}

/**
 * Verifies that locality preferences are constrained by the outstanding
 * ANY (off-switch) request count: no node/rack-local allocation is made
 * when required(ANY) == 0, even on a matching node.
 */
@Test
public void testSchedulingConstraints() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  // User
  String user_0 = "user_0";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      spy(new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext));
  a.submitApplicationAttempt(app_0, user_0);

  // Setup some nodes and racks
  String host_0_0 = "127.0.0.1";
  String rack_0 = "rack_0";
  FiCaSchedulerNode node_0_0 = TestUtils.getMockNode(host_0_0, rack_0, 0, 8*GB);
  String host_0_1 = "127.0.0.2";
  FiCaSchedulerNode node_0_1 = TestUtils.getMockNode(host_0_1, rack_0, 0, 8*GB);

  String host_1_0 = "127.0.0.3";
  String rack_1 = "rack_1";
  FiCaSchedulerNode node_1_0 =
    TestUtils.getMockNode(host_1_0, rack_1, 0, 8*GB);

final int numNodes = 3;
Resource clusterResource = Resources.createResource(
    numNodes * (8*GB), numNodes * 16);
when(csContext.getNumClusterNodes()).thenReturn(numNodes);

// Setup resource-requests and submit
Priority priority = TestUtils.createMockPriority(1);
List<ResourceRequest> app_0_requests_0 = new ArrayList<ResourceRequest>();
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_0_0, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_0_1, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_0, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(host_1_0, 1*GB, 1,
        true, priority, recordFactory));
app_0_requests_0.add(
    TestUtils.createResourceRequest(rack_1, 1*GB, 1,
        true, priority, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);

// Start testing...

// Add one request
app_0_requests_0.clear();
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, // only one
        true, priority, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);

// NODE_LOCAL - node_0_0 consumes the single outstanding ANY request
a.assignContainers(clusterResource, node_0_0,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_0_0),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset
assertEquals(0, app_0.getTotalRequiredResources(priority));

// No allocation on node_1_0 even though it's node/rack local since
// required(ANY) == 0
a.assignContainers(clusterResource, node_1_0,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_0),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // Still zero
                                                             // since #req=0
assertEquals(0, app_0.getTotalRequiredResources(priority));

// Add one request
app_0_requests_0.clear();
app_0_requests_0.add(
    TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, // only one
        true, priority, recordFactory));
app_0.updateResourceRequests(app_0_requests_0);

// No allocation on node_0_1 even though it's node/rack local since
// required(rack_1) == 0
a.assignContainers(clusterResource, node_0_1,
    new ResourceLimits(clusterResource));
verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_0),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(1, app_0.getSchedulingOpportunities(priority));
assertEquals(1, app_0.getTotalRequiredResources(priority));

// NODE_LOCAL - node_1_0
a.assignContainers(clusterResource, node_1_0,
    new ResourceLimits(clusterResource));
verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1_0),
    any(Priority.class), any(ResourceRequest.class), any(Container.class));
assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should reset
assertEquals(0, app_0.getTotalRequiredResources(priority));
}

/**
 * Verifies that raising maximum-am-resource-percent via a queue refresh
 * (root.reinitialize) activates a previously pending application: before
 * the refresh queue 'e' has 2 active / 1 pending apps, afterwards
 * 3 active / 0 pending.
 */
@Test (timeout = 30000)
public void testActivateApplicationAfterQueueRefresh() throws Exception {
  // Manipulate queue 'e'
  LeafQueue e = stubLeafQueue((LeafQueue)queues.get(E));

  // Users
  final String user_e = "user_e";

  when(amResourceRequest.getCapability()).thenReturn(
      Resources.createResource(1 * GB, 0));

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      new FiCaSchedulerApp(appAttemptId_0, user_e, e,
          mock(ActiveUsersManager.class), spyRMContext);
  e.submitApplicationAttempt(app_0, user_e);

  final ApplicationAttemptId appAttemptId_1 =
      TestUtils.getMockApplicationAttemptId(1, 0);
  FiCaSchedulerApp app_1 =
      new FiCaSchedulerApp(appAttemptId_1, user_e, e,
          mock(ActiveUsersManager.class), spyRMContext);
  e.submitApplicationAttempt(app_1, user_e); // same user

  final ApplicationAttemptId appAttemptId_2 =
    TestUtils.getMockApplicationAttemptId(2, 0);
FiCaSchedulerApp app_2 =
    new FiCaSchedulerApp(appAttemptId_2, user_e, e,
        mock(ActiveUsersManager.class), spyRMContext);
e.submitApplicationAttempt(app_2, user_e); // same user

// before reinitialization
assertEquals(2, e.activeApplications.size());
assertEquals(1, e.pendingApplications.size());

// Double the AM-resource-percent and refresh the queue hierarchy; the
// pending third application should become active.
csConf.setDouble(CapacitySchedulerConfiguration
    .MAXIMUM_APPLICATION_MASTERS_RESOURCE_PERCENT,
    CapacitySchedulerConfiguration
    .DEFAULT_MAXIMUM_APPLICATIONMASTERS_RESOURCE_PERCENT * 2);
Map<String, CSQueue> newQueues = new HashMap<String, CSQueue>();
CSQueue newRoot =
    CapacityScheduler.parseQueue(csContext, csConf, null,
        CapacitySchedulerConfiguration.ROOT,
        newQueues, queues,
        TestUtils.spyHook);
queues = newQueues;
root.reinitialize(newRoot, csContext.getClusterResource());

// after reinitialization
assertEquals(3, e.activeApplications.size());
assertEquals(0, e.pendingApplications.size());
}

/**
 * Verifies that a queue refresh picks up a changed node-locality-delay
 * setting (40 before, 60 after).
 */
@Test (timeout = 30000)
public void testNodeLocalityAfterQueueRefresh() throws Exception {
  // Manipulate queue 'e'
  LeafQueue e = stubLeafQueue((LeafQueue)queues.get(E));

  // before reinitialization
  assertEquals(40, e.getNodeLocalityDelay());

  csConf.setInt(CapacitySchedulerConfiguration
      .NODE_LOCALITY_DELAY, 60);
  Map<String, CSQueue> newQueues = new HashMap<String, CSQueue>();
  CSQueue newRoot =
      CapacityScheduler.parseQueue(csContext, csConf, null,
          CapacitySchedulerConfiguration.ROOT,
          newQueues, queues,
          TestUtils.spyHook);
  queues = newQueues;
  root.reinitialize(newRoot, cs.getClusterResource());

  // after reinitialization
  assertEquals(60, e.getNodeLocalityDelay());
}

/**
 * Verifies that growing the cluster resource (updateClusterResource)
 * activates a pending application: 2 active / 1 pending before the
 * update, 3 active / 0 pending after.
 */
@Test (timeout = 30000)
public void testActivateApplicationByUpdatingClusterResource()
    throws Exception {
  // Manipulate queue 'e'
  LeafQueue e = stubLeafQueue((LeafQueue)queues.get(E));

  // Users
  final String user_e = "user_e";

  when(amResourceRequest.getCapability()).thenReturn(
      Resources.createResource(1 * GB, 0));

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      new FiCaSchedulerApp(appAttemptId_0, user_e, e,
          mock(ActiveUsersManager.class), spyRMContext);
  e.submitApplicationAttempt(app_0, user_e);

  final ApplicationAttemptId appAttemptId_1 =
      TestUtils.getMockApplicationAttemptId(1, 0);
  FiCaSchedulerApp app_1 =
      new FiCaSchedulerApp(appAttemptId_1, user_e, e,
          mock(ActiveUsersManager.class), spyRMContext);
  e.submitApplicationAttempt(app_1, user_e); // same user

  final ApplicationAttemptId appAttemptId_2 =
      TestUtils.getMockApplicationAttemptId(2, 0);
  FiCaSchedulerApp app_2 =
      new FiCaSchedulerApp(appAttemptId_2, user_e, e,
          mock(ActiveUsersManager.class), spyRMContext);
  e.submitApplicationAttempt(app_2, user_e); // same user

  // before updating cluster resource
  assertEquals(2, e.activeApplications.size());
  assertEquals(1, e.pendingApplications.size());

  Resource clusterResource =
      Resources.createResource(200 * 16 * GB, 100 * 32);
  e.updateClusterResource(clusterResource,
      new ResourceLimits(clusterResource));

  // after updating cluster resource
  assertEquals(3, e.activeApplications.size());
  assertEquals(0, e.pendingApplications.size());
}

/**
 * Returns true iff any of the given per-queue ACL-info entries contains
 * the requested ACL for the current user.
 */
public boolean hasQueueACL(List<QueueUserACLInfo> aclInfos, QueueACL acl) {
  for (QueueUserACLInfo aclInfo : aclInfos) {
    if (aclInfo.getUserAcls().contains(acl)) {
      return true;
    }
  }
  return false;
}

/**
 * Verifies SUBMIT_APPLICATIONS ACL inheritance through the queue tree:
 * leaf queues 'a' and 'b' grant access while root, parent 'c' and leaf
 * 'c1' deny it, both via hasAccess() and via getQueueUserAclInfo().
 */
@Test
public void testInheritedQueueAcls() throws IOException {
  UserGroupInformation user = UserGroupInformation.getCurrentUser();

  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));
  LeafQueue b = stubLeafQueue((LeafQueue)queues.get(B));
  ParentQueue c = (ParentQueue)queues.get(C);
  LeafQueue c1 = stubLeafQueue((LeafQueue)queues.get(C1));

  assertFalse(root.hasAccess(QueueACL.SUBMIT_APPLICATIONS, user));
  assertTrue(a.hasAccess(QueueACL.SUBMIT_APPLICATIONS, user));
  assertTrue(b.hasAccess(QueueACL.SUBMIT_APPLICATIONS, user));
  assertFalse(c.hasAccess(QueueACL.SUBMIT_APPLICATIONS, user));
  assertFalse(c1.hasAccess(QueueACL.SUBMIT_APPLICATIONS, user));

  assertTrue(hasQueueACL(
      a.getQueueUserAclInfo(user), QueueACL.SUBMIT_APPLICATIONS));
  assertTrue(hasQueueACL(
      b.getQueueUserAclInfo(user), QueueACL.SUBMIT_APPLICATIONS));
  assertFalse(hasQueueACL(
      c.getQueueUserAclInfo(user), QueueACL.SUBMIT_APPLICATIONS));
  assertFalse(hasQueueACL(
      c1.getQueueUserAclInfo(user), QueueACL.SUBMIT_APPLICATIONS));
}

// NOTE(review): testLocalityConstraints continues past this chunk; only
// its setup is visible here.
@Test
public void testLocalityConstraints() throws Exception {
  // Manipulate queue 'a'
  LeafQueue a = stubLeafQueue((LeafQueue)queues.get(A));

  // User
  String user_0 = "user_0";

  // Submit applications
  final ApplicationAttemptId appAttemptId_0 =
      TestUtils.getMockApplicationAttemptId(0, 0);
  FiCaSchedulerApp app_0 =
      spy(new FiCaSchedulerApp(appAttemptId_0, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext));
  a.submitApplicationAttempt(app_0, user_0);

  final ApplicationAttemptId appAttemptId_1 =
      TestUtils.getMockApplicationAttemptId(1, 0);
  FiCaSchedulerApp app_1 =
      spy(new FiCaSchedulerApp(appAttemptId_1, user_0, a,
          mock(ActiveUsersManager.class), spyRMContext));
  a.submitApplicationAttempt(app_1, user_0);

  // Setup some nodes and racks
  String host_0_0 = "127.0.0.1";
  String rack_0 = "rack_0";
  String host_0_1 = "127.0.0.2";
  FiCaSchedulerNode node_0_1 = TestUtils.getMockNode(host_0_1, rack_0, 0, 8*GB);

  String host_1_0 = "127.0.0.3";
  String rack_1 = "rack_1";
  FiCaSchedulerNode node_1_0 = TestUtils.getMockNode(host_1_0, rack_1, 0, 8*GB);
  String host_1_1 = "127.0.0.4";
  FiCaSchedulerNode node_1_1 = TestUtils.getMockNode(host_1_1, rack_1, 0, 8*GB);

  final int numNodes = 4;
  Resource clusterResource = Resources.createResource(
      numNodes * (8*GB), numNodes * 1);
  when(csContext.getNumClusterNodes()).thenReturn(numNodes);

  // Setup resource-requests
  // resourceName: <priority, memory, #containers, relaxLocality>
  // host_0_0: < 1, 1GB, 1, true >
  // host_0_1: < null >
  // rack_0:   < null >                <----
  // host_1_0: < 1, 1GB, 1, true >
  // host_1_1: < null >
  // rack_1:   < 1, 1GB, 1, false >    <----
  // ANY:      < 1, 1GB, 1, false >    <----
  // Availability:
  //
host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 8G // Blacklist: <host_0_0> Priority priority = TestUtils.createMockPriority(1); List<ResourceRequest> app_0_requests_0 = new ArrayList<ResourceRequest>(); app_0_requests_0.add( TestUtils.createResourceRequest(host_0_0, 1*GB, 1, true, priority, recordFactory)); app_0_requests_0.add( TestUtils.createResourceRequest(host_1_0, 1*GB, 1, true, priority, recordFactory)); app_0_requests_0.add( TestUtils.createResourceRequest(rack_1, 1*GB, 1, false, priority, recordFactory)); app_0_requests_0.add( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, // only one false, priority, recordFactory)); app_0.updateResourceRequests(app_0_requests_0); app_0.updateBlacklist(Collections.singletonList(host_0_0), null); app_0_requests_0.clear(); // // Start testing... // // node_0_1 // Shouldn't allocate since RR(rack_0) = null && RR(ANY) = relax: false a.assignContainers(clusterResource, node_0_1, new ResourceLimits(clusterResource)); verify(app_0, never()).allocate(any(NodeType.class), eq(node_0_1), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 // resourceName: <priority, memory, #containers, relaxLocality> // host_0_0: < 1, 1GB, 1, true > // host_0_1: < null > // rack_0: < null > <---- // host_1_0: < 1, 1GB, 1, true > // host_1_1: < null > // rack_1: < 1, 1GB, 1, false > <---- // ANY: < 1, 1GB, 1, false > <---- // Availability: // host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 8G // Blacklist: <host_0_0> // node_1_1 // Shouldn't allocate since RR(rack_1) = relax: false a.assignContainers(clusterResource, node_1_1, new ResourceLimits(clusterResource)); verify(app_0, never()).allocate(any(NodeType.class), eq(node_0_1), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 // Allow rack-locality for rack_1, but 
blacklist node_1_1 app_0_requests_0.add( TestUtils.createResourceRequest(rack_1, 1*GB, 1, true, priority, recordFactory)); app_0.updateResourceRequests(app_0_requests_0); app_0.updateBlacklist(Collections.singletonList(host_1_1), null); app_0_requests_0.clear(); // resourceName: <priority, memory, #containers, relaxLocality> // host_0_0: < 1, 1GB, 1, true > // host_0_1: < null > // rack_0: < null > // host_1_0: < 1, 1GB, 1, true > // host_1_1: < null > // rack_1: < 1, 1GB, 1, true > // ANY: < 1, 1GB, 1, false > // Availability: // host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 8G // Blacklist: < host_0_0 , host_1_1 > <---- // node_1_1 // Shouldn't allocate since node_1_1 is blacklisted a.assignContainers(clusterResource, node_1_1, new ResourceLimits(clusterResource)); verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_1), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 // Now, remove node_1_1 from blacklist, but add rack_1 to blacklist app_0.updateResourceRequests(app_0_requests_0); app_0.updateBlacklist( Collections.singletonList(rack_1), Collections.singletonList(host_1_1)); app_0_requests_0.clear(); // resourceName: <priority, memory, #containers, relaxLocality> // host_0_0: < 1, 1GB, 1, true > // host_0_1: < null > // rack_0: < null > // host_1_0: < 1, 1GB, 1, true > // host_1_1: < null > // rack_1: < 1, 1GB, 1, true > // ANY: < 1, 1GB, 1, false > // Availability: // host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 8G // Blacklist: < host_0_0 , rack_1 > <---- // node_1_1 // Shouldn't allocate since rack_1 is blacklisted a.assignContainers(clusterResource, node_1_1, new ResourceLimits(clusterResource)); verify(app_0, never()).allocate(any(NodeType.class), eq(node_1_1), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); // should be 0 // Now remove 
rack_1 from blacklist app_0.updateResourceRequests(app_0_requests_0); app_0.updateBlacklist(null, Collections.singletonList(rack_1)); app_0_requests_0.clear(); // resourceName: <priority, memory, #containers, relaxLocality> // host_0_0: < 1, 1GB, 1, true > // host_0_1: < null > // rack_0: < null > // host_1_0: < 1, 1GB, 1, true > // host_1_1: < null > // rack_1: < 1, 1GB, 1, true > // ANY: < 1, 1GB, 1, false > // Availability: // host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 8G // Blacklist: < host_0_0 > <---- // Now, should allocate since RR(rack_1) = relax: true a.assignContainers(clusterResource, node_1_1, new ResourceLimits(clusterResource)); verify(app_0,never()).allocate(eq(NodeType.RACK_LOCAL), eq(node_1_1), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); assertEquals(1, app_0.getTotalRequiredResources(priority)); // Now sanity-check node_local app_0_requests_0.add( TestUtils.createResourceRequest(rack_1, 1*GB, 1, false, priority, recordFactory)); app_0_requests_0.add( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, // only one false, priority, recordFactory)); app_0.updateResourceRequests(app_0_requests_0); app_0_requests_0.clear(); // resourceName: <priority, memory, #containers, relaxLocality> // host_0_0: < 1, 1GB, 1, true > // host_0_1: < null > // rack_0: < null > // host_1_0: < 1, 1GB, 1, true > // host_1_1: < null > // rack_1: < 1, 1GB, 1, false > <---- // ANY: < 1, 1GB, 1, false > <---- // Availability: // host_0_0: 8G // host_0_1: 8G // host_1_0: 8G // host_1_1: 7G a.assignContainers(clusterResource, node_1_0, new ResourceLimits(clusterResource)); verify(app_0).allocate(eq(NodeType.NODE_LOCAL), eq(node_1_0), any(Priority.class), any(ResourceRequest.class), any(Container.class)); assertEquals(0, app_0.getSchedulingOpportunities(priority)); assertEquals(0, app_0.getTotalRequiredResources(priority)); } @Test public void 
testMaxAMResourcePerQueuePercentAfterQueueRefresh() throws Exception { CapacitySchedulerConfiguration csConf = new CapacitySchedulerConfiguration(); Resource clusterResource = Resources .createResource(100 * 16 * GB, 100 * 32); CapacitySchedulerContext csContext = mockCSContext(csConf, clusterResource); when(csContext.getRMContext()).thenReturn(rmContext); csConf.setFloat(CapacitySchedulerConfiguration. MAXIMUM_APPLICATION_MASTERS_RESOURCE_PERCENT, 0.1f); ParentQueue root = new ParentQueue(csContext, CapacitySchedulerConfiguration.ROOT, null, null); csConf.setCapacity(CapacitySchedulerConfiguration.ROOT + "." + A, 80); LeafQueue a = new LeafQueue(csContext, A, root, null); assertEquals(0.1f, a.getMaxAMResourcePerQueuePercent(), 1e-3f); assertEquals(a.getAMResourceLimit(), Resources.createResource(160 * GB, 1)); csConf.setFloat(CapacitySchedulerConfiguration. MAXIMUM_APPLICATION_MASTERS_RESOURCE_PERCENT, 0.2f); LeafQueue newA = new LeafQueue(csContext, A, root, null); a.reinitialize(newA, clusterResource); assertEquals(0.2f, a.getMaxAMResourcePerQueuePercent(), 1e-3f); assertEquals(a.getAMResourceLimit(), Resources.createResource(320 * GB, 1)); Resource newClusterResource = Resources.createResource(100 * 20 * GB, 100 * 32); a.updateClusterResource(newClusterResource, new ResourceLimits(newClusterResource)); // 100 * 20 * 0.2 = 400 assertEquals(a.getAMResourceLimit(), Resources.createResource(400 * GB, 1)); } @Test public void testAllocateContainerOnNodeWithoutOffSwitchSpecified() throws Exception { // Manipulate queue 'a' LeafQueue a = stubLeafQueue((LeafQueue) queues.get(B)); // Users final String user_0 = "user_0"; // Submit applications final ApplicationAttemptId appAttemptId_0 = TestUtils.getMockApplicationAttemptId(0, 0); FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, a, mock(ActiveUsersManager.class), spyRMContext); a.submitApplicationAttempt(app_0, user_0); final ApplicationAttemptId appAttemptId_1 = 
TestUtils.getMockApplicationAttemptId(1, 0); FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, a, mock(ActiveUsersManager.class), spyRMContext); a.submitApplicationAttempt(app_1, user_0); // same user // Setup some nodes String host_0 = "127.0.0.1"; FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 8 * GB); final int numNodes = 1; Resource clusterResource = Resources.createResource(numNodes * (8 * GB), numNodes * 16); when(csContext.getNumClusterNodes()).thenReturn(numNodes); // Setup resource-requests Priority priority = TestUtils.createMockPriority(1); app_0.updateResourceRequests(Arrays.asList(TestUtils.createResourceRequest( "127.0.0.1", 1 * GB, 3, true, priority, recordFactory), TestUtils .createResourceRequest(DEFAULT_RACK, 1 * GB, 3, true, priority, recordFactory))); try { a.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); } catch (NullPointerException e) { Assert.fail("NPE when allocating container on node but " + "forget to set off-switch request should be handled"); } } @Test public void testConcurrentAccess() throws Exception { YarnConfiguration conf = new YarnConfiguration(); RMStorageFactory.setConfiguration(conf); YarnAPIStorageFactory.setConfiguration(conf); DBUtility.InitializeDB(); MockRM rm = new MockRM(); rm.init(conf); rm.start(); final String queue = "default"; final String user = "user"; CapacityScheduler cs = (CapacityScheduler) rm.getResourceScheduler(); final LeafQueue defaultQueue = (LeafQueue) cs.getQueue(queue); final List<FiCaSchedulerApp> listOfApps = createListOfApps(10000, user, defaultQueue); final CyclicBarrier cb = new CyclicBarrier(2); final List<ConcurrentModificationException> conException = new ArrayList<ConcurrentModificationException>(); Thread submitAndRemove = new Thread(new Runnable() { @Override public void run() { for (FiCaSchedulerApp fiCaSchedulerApp : listOfApps) { defaultQueue.submitApplicationAttempt(fiCaSchedulerApp, user); } try { 
cb.await(); } catch (Exception e) { // Ignore } for (FiCaSchedulerApp fiCaSchedulerApp : listOfApps) { defaultQueue.finishApplicationAttempt(fiCaSchedulerApp, queue); } } }, "SubmitAndRemoveApplicationAttempt Thread"); Thread getAppsInQueue = new Thread(new Runnable() { List<ApplicationAttemptId> apps = new ArrayList<ApplicationAttemptId>(); @Override public void run() { try { try { cb.await(); } catch (Exception e) { // Ignore } defaultQueue.collectSchedulerApplications(apps); } catch (ConcurrentModificationException e) { conException.add(e); } } }, "GetAppsInQueue Thread"); submitAndRemove.start(); getAppsInQueue.start(); submitAndRemove.join(); getAppsInQueue.join(); assertTrue("ConcurrentModificationException is thrown", conException.isEmpty()); rm.stop(); } @Test public void testGetTotalPendingResourcesConsideringUserLimitOneUser() throws Exception { // Manipulate queue 'e' LeafQueue e = stubLeafQueue((LeafQueue)queues.get(E)); // Allow queue 'e' to use 100% of cluster resources (max capacity). e.setMaxCapacity(1.0f); // When used queue resources goes above capacity (in this case, 1%), user // resource limit (used in calculating headroom) is calculated in small // increments to ensure that user-limit-percent can be met for all users in // a queue. Take user-limit-percent out of the equation so that user // resource limit will always be calculated to its max possible value. 
e.setUserLimit(1000); final String user_0 = "user_0"; // Submit 2 applications for user_0 final ApplicationAttemptId appAttemptId_0 = TestUtils.getMockApplicationAttemptId(0, 0); FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_0, user_0); final ApplicationAttemptId appAttemptId_1 = TestUtils.getMockApplicationAttemptId(1, 0); FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_1, user_0); // same user // Setup 1 node with 100GB of memory resources. String host_0 = "127.0.0.1"; FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 100*GB); final int numNodes = 1; Resource clusterResource = Resources.createResource(numNodes * (100*GB), numNodes * 128); when(csContext.getNumClusterNodes()).thenReturn(numNodes); // Pending resource requests for app_0 and app_1 total 5GB. Priority priority = TestUtils.createMockPriority(1); app_0.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 3, true, priority, recordFactory))); app_1.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority, recordFactory))); // Start testing... // Assign 1st Container of 1GB e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // With queue capacity set at 1% of 100GB and user-limit-factor set to 1.0, // queue 'e' should be able to consume 1GB. // The first container should be assigned to app_0 with no headroom left // even though user_0's apps are still asking for a total of 4GB. 
assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); // Assign 2nd container of 1GB e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // user_0 has no headroom due to user-limit-factor of 1.0. However capacity // scheduler will assign one container more than user-limit-factor. // This container should have also gone to app_0, still with no neadroom // even though app_0 and app_1 are asking for a cumulative 3GB. assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); // Can't allocate 3rd container due to user-limit. Headroom still 0. e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); assertEquals(2*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); // Increase user-limit-factor from 1GB to 10GB (1% * 10 * 100GB = 10GB). // Pending for both app_0 and app_1 are still 3GB, so user-limit-factor // is no longer the limiting factor. e.setUserLimitFactor(10.0f); assertEquals(3*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // app_0 is now satisified, app_1 is still asking for 2GB. assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); assertEquals(2*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); // Get the last 2 containers for app_1, no more pending requests. 
e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); assertEquals(3*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(2*GB, app_1.getCurrentConsumption().getMemory()); assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); // Release each container from app_0 for (RMContainer rmContainer : app_0.getLiveContainers()) { e.completedContainer(clusterResource, app_0, node_0, rmContainer, ContainerStatus.newInstance(rmContainer.getContainerId(), ContainerState.COMPLETE, "", ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), RMContainerEventType.KILL, null, true); } // Release each container from app_1 for (RMContainer rmContainer : app_1.getLiveContainers()) { e.completedContainer(clusterResource, app_1, node_0, rmContainer, ContainerStatus.newInstance(rmContainer.getContainerId(), ContainerState.COMPLETE, "", ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), RMContainerEventType.KILL, null, true); } } @Test public void testGetTotalPendingResourcesConsideringUserLimitTwoUsers() throws Exception { // Manipulate queue 'e' LeafQueue e = stubLeafQueue((LeafQueue)queues.get(E)); // Allow queue 'e' to use 100% of cluster resources (max capacity). e.setMaxCapacity(1.0f); // When used queue resources goes above capacity (in this case, 1%), user // resource limit (used in calculating headroom) is calculated in small // increments to ensure that user-limit-percent can be met for all users in // a queue. Take user-limit-percent out of the equation so that user // resource limit will always be calculated to its max possible value. 
e.setUserLimit(1000); final String user_0 = "user_0"; final String user_1 = "user_1"; // Submit 2 applications for user_0 final ApplicationAttemptId appAttemptId_0 = TestUtils.getMockApplicationAttemptId(0, 0); FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user_0, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_0, user_0); final ApplicationAttemptId appAttemptId_1 = TestUtils.getMockApplicationAttemptId(1, 0); FiCaSchedulerApp app_1 = new FiCaSchedulerApp(appAttemptId_1, user_0, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_1, user_0); // Submit 2 applications for user_1 final ApplicationAttemptId appAttemptId_2 = TestUtils.getMockApplicationAttemptId(2, 0); FiCaSchedulerApp app_2 = new FiCaSchedulerApp(appAttemptId_2, user_1, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_2, user_1); final ApplicationAttemptId appAttemptId_3 = TestUtils.getMockApplicationAttemptId(3, 0); FiCaSchedulerApp app_3 = new FiCaSchedulerApp(appAttemptId_3, user_1, e, mock(ActiveUsersManager.class), spyRMContext); e.submitApplicationAttempt(app_3, user_1); // Setup 1 node with 100GB of memory resources. String host_0 = "127.0.0.1"; FiCaSchedulerNode node_0 = TestUtils.getMockNode(host_0, DEFAULT_RACK, 0, 100*GB); final int numNodes = 1; Resource clusterResource = Resources.createResource(numNodes * (100*GB), numNodes * 128); when(csContext.getNumClusterNodes()).thenReturn(numNodes); // Pending resource requests for user_0: app_0 and app_1 total 3GB (one // 1GB container for app_0 and 2 1GB containers for app_1). 
Priority priority = TestUtils.createMockPriority(1); app_0.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, true, priority, recordFactory))); app_1.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority, recordFactory))); // Pending resource requests for user_1: app_2 and app_3 total 3GB (two // 1GB containers for app_2 and one 1GB container for app_3). priority = TestUtils.createMockPriority(1); app_2.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 2, true, priority, recordFactory))); app_3.updateResourceRequests(Collections.singletonList( TestUtils.createResourceRequest(ResourceRequest.ANY, 1*GB, 1, true, priority, recordFactory))); // Start testing... // Assign 1st Container of 1GB e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // With queue capacity set at 1% of 100GB and user-limit-factor set to 1.0, // queue 'e' should be able to consume 1GB per user // The first container should be assigned to app_0. Pending considering // user limit should be 1GB to account for user_1's headroom. assertEquals(1*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(0*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); // Assign 2nd container of 1GB e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // user_0 has no headroom due to user-limit-factor of 1.0. However capacity // scheduler will assign one container more than user-limit-factor. // This container will have gone to app_1; still with 1GB pending resources // to account for the headroom of user_1. 
// user_0's apps: assertEquals(1*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(0*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); // 3rd container. e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // Allocated to user_1's app_2 since user_1 still has user resource limit // of 1GB. Available pending resources are now 0. // user_0's apps: assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(1*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); // 4th container. e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // Allocated to user_1's app_2 since scheduler allocates 1 container // above user_1's resource limit. Pending still 0. // user_0's apps: assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(2*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // Cannot allocate 5th container because both users are above their allowed // user resource limit. Values should be the same as previously. 
// user_0's apps: assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(1*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(2*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); // Increase user-limit-factor from 1GB to 10GB (10% * 100GB = 10GB). // Pending for both app_0 and app_1 are still 3GB, so user-limit-factor // is no longer the limiting factor. e.setUserLimitFactor(10.0f); // pending considering user limit should be 2GB, since 2 containers are // still being requested, one from each user. assertEquals(2*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // Next container goes to user_0's app_1, since it was still pending. // user_0's apps: assertEquals(1*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(2*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(2*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(0*GB, app_3.getCurrentConsumption().getMemory()); e.assignContainers(clusterResource, node_0, new ResourceLimits(clusterResource)); // Last container goes to user_1's app_3, since it was still pending. 
// user_0's apps: assertEquals(0*GB, e.getTotalPendingResourcesConsideringUserLimit( clusterResource).getMemory()); assertEquals(1*GB, app_0.getCurrentConsumption().getMemory()); assertEquals(2*GB, app_1.getCurrentConsumption().getMemory()); // user_1's apps: assertEquals(2*GB, app_2.getCurrentConsumption().getMemory()); assertEquals(1*GB, app_3.getCurrentConsumption().getMemory()); // Release each container from app_0 for (RMContainer rmContainer : app_0.getLiveContainers()) { e.completedContainer(clusterResource, app_0, node_0, rmContainer, ContainerStatus.newInstance(rmContainer.getContainerId(), ContainerState.COMPLETE, "", ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), RMContainerEventType.KILL, null, true); } // Release each container from app_1 for (RMContainer rmContainer : app_1.getLiveContainers()) { e.completedContainer(clusterResource, app_1, node_0, rmContainer, ContainerStatus.newInstance(rmContainer.getContainerId(), ContainerState.COMPLETE, "", ContainerExitStatus.KILLED_BY_RESOURCEMANAGER), RMContainerEventType.KILL, null, true); } } private List<FiCaSchedulerApp> createListOfApps(int noOfApps, String user, LeafQueue defaultQueue) { List<FiCaSchedulerApp> appsLists = new ArrayList<FiCaSchedulerApp>(); for (int i = 0; i < noOfApps; i++) { ApplicationAttemptId appAttemptId_0 = TestUtils.getMockApplicationAttemptId(i, 0); FiCaSchedulerApp app_0 = new FiCaSchedulerApp(appAttemptId_0, user, defaultQueue, mock(ActiveUsersManager.class), spyRMContext); appsLists.add(app_0); } return appsLists; } private CapacitySchedulerContext mockCSContext( CapacitySchedulerConfiguration csConf, Resource clusterResource) { CapacitySchedulerContext csContext = mock(CapacitySchedulerContext.class); when(csContext.getConfiguration()).thenReturn(csConf); when(csContext.getConf()).thenReturn(new YarnConfiguration()); when(csContext.getResourceCalculator()).thenReturn(resourceCalculator); when(csContext.getClusterResource()).thenReturn(clusterResource); 
when(csContext.getMinimumResourceCapability()).thenReturn( Resources.createResource(GB, 1)); when(csContext.getMaximumResourceCapability()).thenReturn( Resources.createResource(2 * GB, 2)); return csContext; } @After public void tearDown() throws Exception { if (cs != null) { cs.stop(); } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/waf/transform/GetRuleRequestMarshaller.java
1931
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.waf.model.waf.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.waf.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * GetRuleRequestMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class GetRuleRequestMarshaller { private static final MarshallingInfo<String> RULEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("RuleId").build(); private static final GetRuleRequestMarshaller instance = new GetRuleRequestMarshaller(); public static GetRuleRequestMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(GetRuleRequest getRuleRequest, ProtocolMarshaller protocolMarshaller) { if (getRuleRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getRuleRequest.getRuleId(), RULEID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
simvip/isliusar
chapter_010_Spring_Data/src/main/java/my/jpa/models/Car.java
1220
package my.jpa.models; import lombok.Data; import my.jpa.models.parts.BaseBlock; import my.jpa.models.parts.Engine; import my.jpa.models.parts.GearBox; import my.jpa.models.parts.Transmission; import javax.persistence.*; /** * Created by Ivan Sliusar on 16.05.2018. * Red Line Soft corp. */ @Entity @Table(name = "car") public @Data class Car implements BaseBlock{ @Id @GeneratedValue(strategy = GenerationType.IDENTITY) @Column(name = "id") private Integer id; @Column(name = "name") private String name; @ManyToOne(cascade = CascadeType.ALL) @JoinColumn (name = "id_engine") private Engine engine; @ManyToOne(cascade = CascadeType.ALL) @JoinColumn (name = "id_transmission") private Transmission transmission; @ManyToOne(cascade = CascadeType.ALL) @JoinColumn (name = "id_gearbox") private GearBox gearbox; public Car(String name, Engine engine, Transmission transmission, GearBox gearbox) { this.name = name; this.engine = engine; this.transmission = transmission; this.gearbox = gearbox; } public Car() {} @Override public String toString() { return this.getId().toString(); } }
apache-2.0
jacquesgiraudel/TP-Formation-Android
4 - Reseau et stockage donnees/TP ProductsReader/app/src/androidTest/java/com/jgl/tpproductsreader/ApplicationTest.java
355
package com.jgl.tpproductsreader;

import android.app.Application;
import android.test.ApplicationTestCase;

/**
 * Default instrumentation test scaffold generated by Android Studio;
 * exercises the stock {@link Application} class with no extra assertions.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {

    public ApplicationTest() {
        super(Application.class);
    }
}
apache-2.0
bafeimao/umbrella
umbrella-support/src/main/java/net/bafeimao/umbrella/support/data/entity/query/CriteriaChain.java
1502
/*
 * Copyright 2002-2015 by bafeimao.net
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.bafeimao.umbrella.support.data.entity.query;

import java.util.LinkedList;

/**
 * Ordered chain of {@link Criteria} belonging to one {@link Query}.
 * Each added criterion captures the pending OR flag at insertion time,
 * after which the flag resets so OR applies to a single criterion only.
 *
 * Created by bafeimao on 2015/10/28.
 */
public class CriteriaChain {

    /** Owning query; fixed for the lifetime of the chain. */
    private final Query query;

    /** Criteria collected so far, in insertion order. */
    private LinkedList<Criteria> criterias = new LinkedList<Criteria>();

    /** When true, the next criterion added is OR-combined rather than AND-combined. */
    private boolean nextOr = false;

    public CriteriaChain(Query query) {
        this.query = query;
    }

    /**
     * Starts building a condition on the named field of the owning query.
     *
     * @param name field name to constrain
     * @return a fresh {@link FieldOps} bound to this chain's query
     */
    public FieldOps field(String name) {
        return new FieldOps(this.query, name);
    }

    /**
     * Appends a criterion, stamping it with the current OR flag and then
     * clearing that flag.
     */
    public void add(Criteria criteria) {
        criteria.setOr(this.nextOr);
        this.criterias.add(criteria);
        this.nextOr = false; // OR applies to exactly one subsequent criterion
    }

    public Query getQuery() {
        return this.query;
    }

    public LinkedList<Criteria> getCriterias() {
        return this.criterias;
    }

    public boolean isNextOr() {
        return this.nextOr;
    }

    public void setNextOr(boolean nextOr) {
        this.nextOr = nextOr;
    }
}
apache-2.0
OpenGamma/Strata
modules/pricer/src/test/java/com/opengamma/strata/pricer/impl/model/HullWhiteOneFactorPiecewiseConstantInterestRateModelTest.java
27962
/* * Copyright (C) 2015 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.strata.pricer.impl.model; import static com.opengamma.strata.collect.TestHelper.assertSerialization; import static com.opengamma.strata.collect.TestHelper.coverImmutableBean; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.data.Offset.offset; import java.time.LocalDate; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import com.opengamma.strata.basics.ReferenceData; import com.opengamma.strata.basics.date.DayCounts; import com.opengamma.strata.basics.index.IborIndex; import com.opengamma.strata.basics.index.IborIndices; import com.opengamma.strata.basics.value.ValueDerivatives; import com.opengamma.strata.collect.DoubleArrayMath; import com.opengamma.strata.collect.array.DoubleArray; import com.opengamma.strata.collect.array.DoubleMatrix; import com.opengamma.strata.collect.tuple.Pair; import com.opengamma.strata.pricer.impl.rate.model.HullWhiteOneFactorPiecewiseConstantInterestRateModel; import com.opengamma.strata.pricer.model.HullWhiteOneFactorPiecewiseConstantParameters; /** * Test {@link HullWhiteOneFactorPiecewiseConstantInterestRateModel}. 
*/ public class HullWhiteOneFactorPiecewiseConstantInterestRateModelTest { private static final ReferenceData REF_DATA = ReferenceData.standard(); private static final double MEAN_REVERSION = 0.01; private static final DoubleArray VOLATILITY = DoubleArray.of(0.01, 0.011, 0.012, 0.013, 0.014); private static final DoubleArray VOLATILITY_TIME = DoubleArray.of(0.5, 1.0, 2.0, 5.0); private static final HullWhiteOneFactorPiecewiseConstantParameters MODEL_PARAMETERS = HullWhiteOneFactorPiecewiseConstantParameters.of(MEAN_REVERSION, VOLATILITY, VOLATILITY_TIME); private static final HullWhiteOneFactorPiecewiseConstantInterestRateModel MODEL = HullWhiteOneFactorPiecewiseConstantInterestRateModel.DEFAULT; private static final DoubleArray DCF_FIXED = DoubleArray.of(0.50, 0.48); private static final DoubleArray ALPHA_FIXED = DoubleArray.of(0.02, 0.04); private static final DoubleArray DCF_IBOR = DoubleArray.of(-1.0, -0.01, 0.01, -0.01, 0.95); private static final DoubleArray ALPHA_IBOR = DoubleArray.of(0.00, 0.01, 0.02, 0.03, 0.04); private static final double TOLERANCE_RATE = 1.0E-10; private static final double TOLERANCE_RATE_DELTA = 1.0E-8; private static final double TOLERANCE_RATE_DELTA2 = 1.0E-7; private static final double TOLERANCE_ALPHA = 1E-8; private static final IborIndex EURIBOR3M = IborIndices.EUR_EURIBOR_3M; /** * Tests the class getters. */ @Test public void getter() { assertThat(MEAN_REVERSION).isEqualTo(MODEL_PARAMETERS.getMeanReversion()); for (int loopperiod = 0; loopperiod < VOLATILITY.size(); loopperiod++) { assertThat(VOLATILITY.get(loopperiod)).isEqualTo(MODEL_PARAMETERS.getVolatility().get(loopperiod)); } double[] volTime = MODEL_PARAMETERS.getVolatilityTime().toArray(); for (int loopperiod = 0; loopperiod < VOLATILITY_TIME.size(); loopperiod++) { assertThat(VOLATILITY_TIME.get(loopperiod)).isEqualTo(volTime[loopperiod + 1]); } } /** * Tests the class setters. 
*/ @Test public void setter() { double volReplaced = 0.02; HullWhiteOneFactorPiecewiseConstantParameters param1 = MODEL_PARAMETERS.withLastVolatility(volReplaced); assertThat(volReplaced).isEqualTo(param1.getVolatility().get(param1.getVolatility().size() - 1)); HullWhiteOneFactorPiecewiseConstantParameters param2 = MODEL_PARAMETERS.withLastVolatility(VOLATILITY.get(VOLATILITY.size() - 1)); for (int loopperiod = 0; loopperiod < param2.getVolatility().size(); loopperiod++) { assertThat(VOLATILITY.get(loopperiod)).isEqualTo(param2.getVolatility().get(loopperiod)); } } /** * Tests the equal and hash code methods. */ @Test public void equalHash() { HullWhiteOneFactorPiecewiseConstantParameters newParameter = HullWhiteOneFactorPiecewiseConstantParameters.of(MEAN_REVERSION, VOLATILITY, VOLATILITY_TIME); assertThat(MODEL_PARAMETERS.equals(newParameter)).isTrue(); assertThat(MODEL_PARAMETERS.hashCode() == newParameter.hashCode()).isTrue(); HullWhiteOneFactorPiecewiseConstantParameters modifiedParameter = HullWhiteOneFactorPiecewiseConstantParameters.of(MEAN_REVERSION + 0.01, VOLATILITY, VOLATILITY_TIME); assertThat(MODEL_PARAMETERS.equals(modifiedParameter)).isFalse(); } /** * Test the future convexity adjustment factor v a hard-coded value. 
*/ @Test public void futureConvexityFactor() { LocalDate spotDate = LocalDate.of(2012, 9, 19); LocalDate lastTradingDate = EURIBOR3M.calculateFixingFromEffective(spotDate, REF_DATA); LocalDate referenceDate = LocalDate.of(2010, 8, 18); double tradeLastTime = DayCounts.ACT_ACT_ISDA.relativeYearFraction(referenceDate, lastTradingDate); double fixStartTime = DayCounts.ACT_ACT_ISDA.relativeYearFraction(referenceDate, spotDate); double fixEndTime = DayCounts.ACT_ACT_ISDA.relativeYearFraction( referenceDate, EURIBOR3M.calculateMaturityFromEffective(spotDate, REF_DATA)); double factor = MODEL.futuresConvexityFactor(MODEL_PARAMETERS, tradeLastTime, fixStartTime, fixEndTime); double expectedFactor = 1.000079130767980; assertThat(expectedFactor).isCloseTo(factor, offset(TOLERANCE_RATE)); // Derivative with respect to volatility parameters int nbSigma = MODEL_PARAMETERS.getVolatility().size(); ValueDerivatives factorDeriv = MODEL.futuresConvexityFactorAdjoint(MODEL_PARAMETERS, tradeLastTime, fixStartTime, fixEndTime); double factor2 = factorDeriv.getValue(); double[] sigmaBar = factorDeriv.getDerivatives().toArray(); assertThat(factor).isCloseTo(factor2, offset(TOLERANCE_RATE)); double[] sigmaBarExpected = new double[nbSigma]; double shift = 1E-6; for (int loops = 0; loops < nbSigma; loops++) { double[] volBumped = VOLATILITY.toArray(); volBumped[loops] += shift; HullWhiteOneFactorPiecewiseConstantParameters parametersBumped = HullWhiteOneFactorPiecewiseConstantParameters .of(MEAN_REVERSION, DoubleArray.copyOf(volBumped), VOLATILITY_TIME); double factorPlus = MODEL.futuresConvexityFactor(parametersBumped, tradeLastTime, fixStartTime, fixEndTime); volBumped[loops] -= 2 * shift; parametersBumped = HullWhiteOneFactorPiecewiseConstantParameters.of( MEAN_REVERSION, DoubleArray.copyOf(volBumped), VOLATILITY_TIME); double factorMinus = MODEL.futuresConvexityFactor(parametersBumped, tradeLastTime, fixStartTime, fixEndTime); sigmaBarExpected[loops] = (factorPlus - factorMinus) / (2 * 
shift); assertThat(sigmaBarExpected[loops]).isCloseTo(sigmaBar[loops], offset(TOLERANCE_RATE)); } } /** * Test the payment delay convexity adjustment factor. */ @Test public void paymentDelayConvexityFactor() { double startExpiryTime = 1.00; double endExpiryTime = 3.00; double startFixingPeriod = 3.05; double endFixingPeriod = 3.55; double paymentTime = 3.45; double hwMeanReversion = 0.011; // Constant volatility double hwEta = 0.02; HullWhiteOneFactorPiecewiseConstantParameters parameters = HullWhiteOneFactorPiecewiseConstantParameters.of( hwMeanReversion, DoubleArray.of(hwEta), DoubleArray.of()); double factor1 = (Math.exp(-hwMeanReversion * endFixingPeriod) - Math.exp(-hwMeanReversion * paymentTime)) * (Math.exp(-hwMeanReversion * endFixingPeriod) - Math.exp(-hwMeanReversion * startFixingPeriod)); double num = 2 * Math.pow(hwMeanReversion, 3); double factor2 = hwEta * hwEta * (Math.exp(2 * hwMeanReversion * endExpiryTime) - Math.exp(2 * hwMeanReversion * startExpiryTime)); double factorExpected = Math.exp(factor1 * factor2 / num); double factorComputed = MODEL.paymentDelayConvexityFactor(parameters, startExpiryTime, endExpiryTime, startFixingPeriod, endFixingPeriod, paymentTime); assertThat(factorExpected).isCloseTo(factorComputed, offset(TOLERANCE_RATE)); // Piecewise constant constant volatility double[] hwEtaP = new double[] {0.02, 0.021, 0.022, 0.023 }; double[] hwTime = new double[] {0.5, 1.0, 2.0 }; HullWhiteOneFactorPiecewiseConstantParameters parametersP = HullWhiteOneFactorPiecewiseConstantParameters.of( hwMeanReversion, DoubleArray.copyOf(hwEtaP), DoubleArray.copyOf(hwTime)); double factorP2 = hwEtaP[2] * hwEtaP[2] * (Math.exp(2 * hwMeanReversion * hwTime[2]) - Math.exp(2 * hwMeanReversion * startExpiryTime)); factorP2 += hwEtaP[3] * hwEtaP[3] * (Math.exp(2 * hwMeanReversion * endExpiryTime) - Math.exp(2 * hwMeanReversion * hwTime[2])); double factorPExpected = Math.exp(factor1 * factorP2 / num); double factorPComputed = 
MODEL.paymentDelayConvexityFactor( parametersP, startExpiryTime, endExpiryTime, startFixingPeriod, endFixingPeriod, paymentTime); assertThat(factorPExpected).isCloseTo(factorPComputed, offset(TOLERANCE_RATE)); } /** * Test the bond volatility (called alpha) vs a hard-coded value. */ @Test public void alpha() { double expiry1 = 0.25; double expiry2 = 2.25; double numeraire = 10.0; double maturity = 9.0; double alphaExpected = -0.015191631; double alpha = MODEL.alpha(MODEL_PARAMETERS, expiry1, expiry2, numeraire, maturity); //All data assertThat(alphaExpected).isCloseTo(alpha, offset(TOLERANCE_ALPHA)); alphaExpected = -0.015859116; alpha = MODEL.alpha(MODEL_PARAMETERS, 0.0, expiry2, numeraire, maturity); //From today assertThat(alphaExpected).isCloseTo(alpha, offset(TOLERANCE_ALPHA)); alphaExpected = 0.111299267; alpha = MODEL.alpha(MODEL_PARAMETERS, 0.0, expiry2, expiry2, maturity); // From today with expiry numeraire assertThat(alphaExpected).isCloseTo(alpha, offset(TOLERANCE_ALPHA)); alpha = MODEL.alpha(MODEL_PARAMETERS, 0.0, 0.0, numeraire, maturity); // From 0 to 0 assertThat(0.0d).isCloseTo(alpha, offset(TOLERANCE_ALPHA)); } /** * Test the adjoint algorithmic differentiation version of alpha. 
*/ @Test public void alphaDSigma() { double expiry1 = 0.25; double expiry2 = 2.25; double numeraire = 10.0; double maturity = 9.0; int nbVolatility = VOLATILITY.size(); ValueDerivatives alphaDeriv = MODEL.alphaAdjoint(MODEL_PARAMETERS, expiry1, expiry2, numeraire, maturity); double alpha = alphaDeriv.getValue(); double[] alphaDerivatives = alphaDeriv.getDerivatives().toArray(); double alpha2 = MODEL.alpha(MODEL_PARAMETERS, expiry1, expiry2, numeraire, maturity); assertThat(alpha2).isCloseTo(alpha, offset(1.0E-10)); double shiftVol = 1.0E-6; double[] volatilityBumped = new double[nbVolatility]; System.arraycopy(VOLATILITY.toArray(), 0, volatilityBumped, 0, nbVolatility); double[] alphaBumpedPlus = new double[nbVolatility]; double[] alphaBumpedMinus = new double[nbVolatility]; HullWhiteOneFactorPiecewiseConstantParameters parametersBumped; for (int loopvol = 0; loopvol < nbVolatility; loopvol++) { volatilityBumped[loopvol] += shiftVol; parametersBumped = HullWhiteOneFactorPiecewiseConstantParameters.of( MEAN_REVERSION, DoubleArray.copyOf(volatilityBumped), VOLATILITY_TIME); alphaBumpedPlus[loopvol] = MODEL.alpha(parametersBumped, expiry1, expiry2, numeraire, maturity); volatilityBumped[loopvol] -= 2 * shiftVol; parametersBumped = HullWhiteOneFactorPiecewiseConstantParameters.of( MEAN_REVERSION, DoubleArray.copyOf(volatilityBumped), VOLATILITY_TIME); alphaBumpedMinus[loopvol] = MODEL.alpha(parametersBumped, expiry1, expiry2, numeraire, maturity); assertThat((alphaBumpedPlus[loopvol] - alphaBumpedMinus[loopvol]) / (2 * shiftVol)).isCloseTo(alphaDerivatives[loopvol], offset(1.0E-9)); volatilityBumped[loopvol] = VOLATILITY.get(loopvol); } } /** * Test the swaption exercise boundary. 
*/ @Test public void kappa() { double[] cashFlowAmount = new double[] {-1.0, 0.05, 0.05, 0.05, 0.05, 1.05 }; double notional = 100000000; // 100m double[] cashFlowTime = new double[] {10.0, 11.0, 12.0, 13.0, 14.00, 15.00 }; double expiryTime = cashFlowTime[0] - 2.0 / 365.0; int nbCF = cashFlowAmount.length; double[] discountedCashFlow = new double[nbCF]; double[] alpha = new double[nbCF]; double rate = 0.04; for (int loopcf = 0; loopcf < nbCF; loopcf++) { discountedCashFlow[loopcf] = cashFlowAmount[loopcf] * Math.exp(-rate * cashFlowTime[loopcf]) * notional; alpha[loopcf] = MODEL.alpha(MODEL_PARAMETERS, 0.0, expiryTime, expiryTime, cashFlowTime[loopcf]); } double kappa = MODEL.kappa(DoubleArray.copyOf(discountedCashFlow), DoubleArray.copyOf(alpha)); double swapValue = 0.0; for (int loopcf = 0; loopcf < nbCF; loopcf++) { swapValue += discountedCashFlow[loopcf] * Math.exp(-Math.pow(alpha[loopcf], 2.0) / 2.0 - alpha[loopcf] * kappa); } assertThat(0.0).isCloseTo(swapValue, offset(1.0E-1)); } @Test public void swapRate() { double shift = 1.0E-4; double x = 0.1; double numerator = 0.0; for (int loopcf = 0; loopcf < DCF_IBOR.size(); loopcf++) { numerator += DCF_IBOR.get(loopcf) * Math.exp(-ALPHA_IBOR.get(loopcf) * x - 0.5 * ALPHA_IBOR.get(loopcf) * ALPHA_IBOR.get(loopcf)); } double denominator = 0.0; for (int loopcf = 0; loopcf < DCF_FIXED.size(); loopcf++) { denominator += DCF_FIXED.get(loopcf) * Math.exp(-ALPHA_FIXED.get(loopcf) * x - 0.5 * ALPHA_FIXED.get(loopcf) * ALPHA_FIXED.get(loopcf)); } double swapRateExpected = -numerator / denominator; double swapRateComputed = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); assertThat(swapRateExpected).isCloseTo(swapRateComputed, offset(TOLERANCE_RATE)); double swapRatePlus = MODEL.swapRate(x + shift, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double swapRateMinus = MODEL.swapRate(x - shift, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double swapRateDx1Expected = (swapRatePlus - swapRateMinus) / (2 * 
shift); double swapRateDx1Computed = MODEL.swapRateDx1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); assertThat(swapRateDx1Expected).isCloseTo(swapRateDx1Computed, offset(TOLERANCE_RATE_DELTA)); double swapRateDx2Expected = (swapRatePlus + swapRateMinus - 2 * swapRateComputed) / (shift * shift); double swapRateDx2Computed = MODEL.swapRateDx2(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); assertThat(swapRateDx2Expected).isCloseTo(swapRateDx2Computed, offset(TOLERANCE_RATE_DELTA2)); } @Test public void swapRateDdcf() { double shift = 1.0E-8; double x = 0.0; ValueDerivatives computed = MODEL.swapRateDdcff1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double swapRateComputed = computed.getValue(); double[] ddcffComputed = computed.getDerivatives().toArray(); double swapRateExpected = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); assertThat(swapRateComputed).isCloseTo(swapRateExpected, offset(TOLERANCE_RATE)); double[] ddcffExpected = new double[DCF_FIXED.size()]; for (int loopcf = 0; loopcf < DCF_FIXED.size(); loopcf++) { double[] dsfBumped = DCF_FIXED.toArray(); dsfBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRate(x, DoubleArray.copyOf(dsfBumped), ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); dsfBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRate(x, DoubleArray.copyOf(dsfBumped), ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); ddcffExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(ddcffExpected, ddcffComputed, TOLERANCE_RATE_DELTA)).isTrue(); double[] ddcfiExpected = new double[DCF_IBOR.size()]; for (int loopcf = 0; loopcf < DCF_IBOR.size(); loopcf++) { double[] dsfBumped = DCF_IBOR.toArray(); dsfBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DoubleArray.copyOf(dsfBumped), ALPHA_IBOR); dsfBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DoubleArray.copyOf(dsfBumped), ALPHA_IBOR); 
ddcfiExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } double[] ddcfiComputed = MODEL.swapRateDdcfi1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR) .getDerivatives().toArray(); assertThat(DoubleArrayMath.fuzzyEquals(ddcfiExpected, ddcfiComputed, TOLERANCE_RATE_DELTA)).isTrue(); } @Test public void swapRateDa() { double shift = 1.0E-8; double x = 0.0; ValueDerivatives computed = MODEL.swapRateDaf1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double swapRateComputed = computed.getValue(); double[] dafComputed = computed.getDerivatives().toArray(); double swapRateExpected = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); assertThat(swapRateComputed).isCloseTo(swapRateExpected, offset(TOLERANCE_RATE)); double[] dafExpected = new double[ALPHA_FIXED.size()]; for (int loopcf = 0; loopcf < ALPHA_FIXED.size(); loopcf++) { double[] afBumped = ALPHA_FIXED.toArray(); afBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRate(x, DCF_FIXED, DoubleArray.copyOf(afBumped), DCF_IBOR, ALPHA_IBOR); afBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRate(x, DCF_FIXED, DoubleArray.copyOf(afBumped), DCF_IBOR, ALPHA_IBOR); dafExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(dafExpected, dafComputed, TOLERANCE_RATE_DELTA)).isTrue(); double[] daiExpected = new double[DCF_IBOR.size()]; for (int loopcf = 0; loopcf < DCF_IBOR.size(); loopcf++) { double[] aiBumped = ALPHA_IBOR.toArray(); aiBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, DoubleArray.copyOf(aiBumped)); aiBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRate(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, DoubleArray.copyOf(aiBumped)); daiExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } double[] daiComputed = MODEL.swapRateDai1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR).getDerivatives().toArray(); 
assertThat(DoubleArrayMath.fuzzyEquals(daiExpected, daiComputed, TOLERANCE_RATE_DELTA)).isTrue(); } @Test public void swapRateDx2Ddcf() { double shift = 1.0E-7; double x = 0.0; Pair<DoubleArray, DoubleArray> dx2ddcfComputed = MODEL.swapRateDx2Ddcf1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double[] dx2DdcffExpected = new double[DCF_FIXED.size()]; for (int loopcf = 0; loopcf < DCF_FIXED.size(); loopcf++) { double[] dsfBumped = DCF_FIXED.toArray(); dsfBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRateDx2(x, DoubleArray.copyOf(dsfBumped), ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); dsfBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRateDx2(x, DoubleArray.copyOf(dsfBumped), ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); dx2DdcffExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(dx2DdcffExpected, dx2ddcfComputed.getFirst().toArray(), TOLERANCE_RATE_DELTA2)).isTrue(); double[] dx2DdcfiExpected = new double[DCF_IBOR.size()]; for (int loopcf = 0; loopcf < DCF_IBOR.size(); loopcf++) { double[] dsfBumped = DCF_IBOR.toArray(); dsfBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRateDx2(x, DCF_FIXED, ALPHA_FIXED, DoubleArray.copyOf(dsfBumped), ALPHA_IBOR); dsfBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRateDx2(x, DCF_FIXED, ALPHA_FIXED, DoubleArray.copyOf(dsfBumped), ALPHA_IBOR); dx2DdcfiExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(dx2DdcfiExpected, dx2ddcfComputed.getSecond().toArray(), TOLERANCE_RATE_DELTA2)).isTrue(); } @Test public void swapRateDx2Da() { double shift = 1.0E-7; double x = 0.0; Pair<DoubleArray, DoubleArray> dx2DaComputed = MODEL.swapRateDx2Da1(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, ALPHA_IBOR); double[] dx2DafExpected = new double[DCF_FIXED.size()]; for (int loopcf = 0; loopcf < DCF_FIXED.size(); loopcf++) { double[] afBumped = ALPHA_FIXED.toArray(); afBumped[loopcf] += shift; double swapRatePlus = 
MODEL.swapRateDx2(x, DCF_FIXED, DoubleArray.copyOf(afBumped), DCF_IBOR, ALPHA_IBOR); afBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRateDx2(x, DCF_FIXED, DoubleArray.copyOf(afBumped), DCF_IBOR, ALPHA_IBOR); dx2DafExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(dx2DafExpected, dx2DaComputed.getFirst().toArray(), TOLERANCE_RATE_DELTA2)).isTrue(); double[] dx2DaiExpected = new double[DCF_IBOR.size()]; for (int loopcf = 0; loopcf < DCF_IBOR.size(); loopcf++) { double[] aiBumped = ALPHA_IBOR.toArray(); aiBumped[loopcf] += shift; double swapRatePlus = MODEL.swapRateDx2(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, DoubleArray.copyOf(aiBumped)); aiBumped[loopcf] -= 2 * shift; double swapRateMinus = MODEL.swapRateDx2(x, DCF_FIXED, ALPHA_FIXED, DCF_IBOR, DoubleArray.copyOf(aiBumped)); dx2DaiExpected[loopcf] = (swapRatePlus - swapRateMinus) / (2 * shift); } assertThat(DoubleArrayMath.fuzzyEquals(dx2DaiExpected, dx2DaComputed.getSecond().toArray(), TOLERANCE_RATE_DELTA2)).isTrue(); } //------------------------------------------------------------------------- // Here methods used for Bermudan swaption pricing and Monte-Carlo are test weakly by regression to 2.x. // Proper tests should be added when these pricing methodologies are available. 
@Test public void test_beta() { double[] theta = new double[] { 0.0, 0.9930234298974474, 1.5013698630136987, 1.9917808219178081, 2.5013698630136987, 2.9972602739726026, 3.5013698630136987, 3.9972602739726026, 4.501220151208923, 4.998487910771765, 5.495890410958904}; double[] expected = new double[] { 0.010526360888642377, 0.008653752074472373, 0.008551601997542554, 0.009479708049949437, 0.009409731278859806, 0.009534948404597303, 0.009504300650429525, 0.009629338816014276, 0.009613195012744198, 0.010403528524805543}; for (int i = 0; i < theta.length - 1; ++i) { assertThat(MODEL.beta(MODEL_PARAMETERS, theta[i], theta[i + 1])).isCloseTo(expected[i], offset(TOLERANCE_RATE)); } } @Test public void test_lambda() { DoubleArray cashFlow = DoubleArray.of(1.1342484780379178E8, 178826.75595605336, -1.1353458434950349E8); DoubleArray alphaSq = DoubleArray.of(0.0059638289722142215, 0.0069253776359785415, 0.007985436623619701); DoubleArray hwH = DoubleArray.of(5.357967757629822, 5.593630711441366, 5.828706853806842); double computed = MODEL.lambda(cashFlow, alphaSq, hwH); assertThat(computed).isCloseTo(-0.0034407112369635212, offset(TOLERANCE_RATE)); double value = 0.0; for (int loopcf = 0; loopcf < 3; loopcf++) { value += cashFlow.get(loopcf) * Math.exp(-0.5 * alphaSq.get(loopcf) - hwH.get(loopcf) * computed); } assertThat(value).isCloseTo(0d, offset(1.0E-7)); } @Test public void test_volatilityMaturityPart() { double u = 5.001332435062505; DoubleMatrix v = DoubleMatrix.copyOf(new double[][] { {5.012261396811139, 5.515068493150685, 6.010958904109589, 6.515068493150685, 7.010958904109589, 7.515068493150685, 8.01095890410959, 8.520458118122614, 9.017725877685455, 9.515068493150684, 10.013698630136986}}); DoubleMatrix computed = MODEL.volatilityMaturityPart(MODEL_PARAMETERS, u, v); double[] expected = new double[] {0.010395243419747402, 0.48742124221025085, 0.9555417903726049, 1.4290478001940943, 1.8925104710768026, 2.361305017379811, 2.8201561576361778, 3.289235677728508, 
3.7447552766260217, 4.198083407732067, 4.650327387669373}; assertThat(DoubleArrayMath.fuzzyEquals(computed.row(0).toArray(), expected, TOLERANCE_RATE)).isTrue(); } //------------------------------------------------------------------------- @Test public void coverage() { coverImmutableBean(MODEL); } @Test public void test_serialization() { assertSerialization(MODEL); } //------------------------------------------------------------------------- /** * Tests of performance. "enabled = false" for the standard testing. */ @Disabled public void performanceAlphaAdjoint() { double expiry1 = 0.25; double expiry2 = 2.25; double numeraire = 10.0; double maturity = 9.0; int nbVolatility = VOLATILITY.size(); long startTime, endTime; int nbTest = 100000; double alpha = 0.0; startTime = System.currentTimeMillis(); for (int looptest = 0; looptest < nbTest; looptest++) { alpha = MODEL.alpha(MODEL_PARAMETERS, expiry1, expiry2, numeraire, maturity); } endTime = System.currentTimeMillis(); System.out.println(nbTest + " alpha Hull-White: " + (endTime - startTime) + " ms"); startTime = System.currentTimeMillis(); for (int looptest = 0; looptest < nbTest; looptest++) { ValueDerivatives computed = MODEL.alphaAdjoint(MODEL_PARAMETERS, expiry1, expiry2, numeraire, maturity); alpha = computed.getValue(); } endTime = System.currentTimeMillis(); System.out.println(nbTest + " alpha Hull-White adjoint (value+" + nbVolatility + " derivatives): " + (endTime - startTime) + " ms"); // Performance note: value: 31-Aug-11: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 75 ms for 1000000 swaptions. // Performance note: value+derivatives: 31-Aug-11: On Mac Pro 3.2 GHz Quad-Core Intel Xeon: 100 ms for 1000000 swaptions. System.out.println("Alpha: " + alpha); } /** * Test the payment delay convexity adjustment factor. Analysis of the size. 
* In normal test, should have (enabled=false) */ @Disabled public void paymentDelayConvexityFactorAnalysis() { double hwMeanReversion = 0.01; double rate = 0.02; double[] tenorTime = {0.25, 0.50 }; int nbTenors = tenorTime.length; double[] lagPayTime = {1.0d / 365.0d, 2.0d / 365.0d, 7.0d / 365.0d }; int nbLags = lagPayTime.length; double lagFixTime = 2.0d / 365.0d; int nbPeriods = 120; double startTimeFirst = 0.25; double startTimeStep = 0.25; double[] startTime = new double[nbPeriods]; for (int loopp = 0; loopp < nbPeriods; loopp++) { startTime[loopp] = startTimeFirst + loopp * startTimeStep; } // Constant volatility double hwEta = 0.02; HullWhiteOneFactorPiecewiseConstantParameters parameters = HullWhiteOneFactorPiecewiseConstantParameters.of( hwMeanReversion, DoubleArray.of(hwEta), DoubleArray.of(0)); double[][][] factor = new double[nbTenors][nbLags][nbPeriods]; double[][][] adj = new double[nbTenors][nbLags][nbPeriods]; for (int loopt = 0; loopt < nbTenors; loopt++) { for (int loopl = 0; loopl < nbLags; loopl++) { for (int loopp = 0; loopp < nbPeriods; loopp++) { factor[loopt][loopl][loopp] = MODEL.paymentDelayConvexityFactor(parameters, 0, startTime[loopp] - lagFixTime, startTime[loopp], startTime[loopp] + tenorTime[loopt], startTime[loopp] + tenorTime[loopt] - lagPayTime[loopl]); adj[loopt][loopl][loopp] = (1.0d / tenorTime[loopt] - rate) * (factor[loopt][loopl][loopp] - 1); } } } @SuppressWarnings("unused") int t = 0; t++; } }
apache-2.0
janarvid/jmtools
src/main/org/veggeberg/jmtools/progarchives/Genre.java
1695
// Originally declared in package org.veggeberg.jmtools.progarchives.

/**
 * Music genres from progarchives.com, keyed by the site's numeric genre id.
 * Each constant carries the id, the display name as shown on the site, and a
 * directory-safe variant of the name used for local storage.
 *
 * Improvement: instance fields are now {@code private final} — enum state
 * must be immutable, and the original mutable fields allowed accidental
 * modification after construction. Constants, constructor arguments and
 * accessors are unchanged, so all callers keep working.
 *
 * NOTE(review): CANTEBURY_SCENE and PSYCEDLIC_SPACE_ROCK misspell
 * "Canterbury"/"Psychedelic", and PROGRESSIVE_ELECTRONIC's dirName contains a
 * space unlike every other dirName — renaming/changing these would break
 * existing callers and stored paths, so they are deliberately left as-is.
 */
public enum Genre {
    CANTEBURY_SCENE(12, "Canterbury Scene", "CanterburyScene"),
    CROSSOVER_PROG(3, "Crossover Prog", "CrossoverProg"),
    ECLECTIC_PROG(42, "Eclectic Prog", "EclecticProg"),
    EXPERIMENTAL_POST_METAL(44, "Experimental/Post Metal", "ExperimentalPostMetal"),
    HEAVY_PROG(41, "Heavy Prog", "HeavyProg"),
    INDO_PROG_RAGA_ROCK(35, "Indo-Prog/Raga Rock", "Indo-ProgRagaRock"),
    JAZZ_ROCK_FUSION(30, "Jazz Rock/Fusion", "JazzRockFusion"),
    KRAUTROCK(17, "Krautrock", "Krautrock"),
    NEO_PROG(18, "Neo-Prog", "Neo-Prog"),
    POST_ROCK_MATH_ROCK(32, "Post Rock/Math Rock", "PostRockMathRock"),
    PROG_FOLK(6, "Prog Folk", "ProgFolk"),
    PROGRESSIVE_ELECTRONIC(33, "Progressive Electronic", "Progressive Electronic"),
    PROGRESSIVE_METAL(19, "Progressive Metal", "ProgressiveMetal"),
    PSYCEDLIC_SPACE_ROCK(15, "Psychedelic/Space Rock", "PsychedelicSpaceRock"),
    RIO_AVANT_PROG(36, "RIO/Avant-Prog", "RIOAvant-Prog"),
    ROCK_PROGRESSIVO_ITALIANO(28, "Rock Progressivo Italiano", "RockProgressivoItaliano"),
    SYMPHONIC_PROG(4, "Symphonic Prog", "SymphonicProg"),
    TECH_EXTREME_PROG_METAL(43, "Tech/Extreme Prog Metal", "TechExtremeProgMetal"),
    ZEUHL(11, "Zeuhl", "Zeuhl"),
    VARIOUS_GENRES(29, "Various Genres/Artists", "VariousGenresArtists"),
    PROG_RELATED(38, "Prog Related", "ProgRelated"),
    PROTO_PROG(37, "Proto-Prog", "Proto-Prog");

    /** Progarchives numeric genre id. */
    private final int id;
    /** Human-readable genre name as displayed on the site. */
    private final String name;
    /** Directory-safe variant of the name for local storage. */
    private final String dirName;

    Genre(int id, String name, String dirName) {
        this.id = id;
        this.name = name;
        this.dirName = dirName;
    }

    public int getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    public String getDirName() {
        return dirName;
    }
}
apache-2.0
brianm/muckery
src/test/java/org/skife/muckery/jmx/MBeanTest.java
507
package org.skife.muckery.jmx;

import org.junit.Test;

import java.lang.management.ManagementFactory;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Checks via the ThreadMXBean that a short-lived thread no longer counts
 * toward the JVM's live-thread total after it has been joined.
 */
public class MBeanTest {

    @Test
    public void testFoo() throws Exception {
        // Snapshot the live thread count before spawning the worker.
        int before = ManagementFactory.getThreadMXBean().getThreadCount();

        Thread worker = new Thread(() -> {
        });
        worker.start();
        worker.join();

        // NOTE(review): assumes no unrelated JVM threads start or stop between
        // the two samples; this assertion may be flaky under load — confirm.
        assertThat(ManagementFactory.getThreadMXBean().getThreadCount()).isEqualTo(before);
    }
}
apache-2.0
aws/aws-sdk-java
aws-java-sdk-core/src/main/java/com/amazonaws/monitoring/EnvironmentVariableCsmConfigurationProvider.java
2919
/* * Copyright 2010-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.monitoring; import com.amazonaws.SdkClientException; import com.amazonaws.annotation.ThreadSafe; import static com.amazonaws.SDKGlobalConfiguration.AWS_CSM_CLIENT_ID_ENV_VAR; import static com.amazonaws.SDKGlobalConfiguration.AWS_CSM_ENABLED_ENV_VAR; import static com.amazonaws.SDKGlobalConfiguration.AWS_CSM_HOST_ENV_VAR; import static com.amazonaws.SDKGlobalConfiguration.AWS_CSM_PORT_ENV_VAR; import static com.amazonaws.SDKGlobalConfiguration.DEFAULT_AWS_CSM_CLIENT_ID; import static com.amazonaws.SDKGlobalConfiguration.DEFAULT_AWS_CSM_PORT; import static com.amazonaws.SDKGlobalConfiguration.DEFAULT_AWS_CSM_HOST; /** * Configuration provider that sources the client side monitoring * configuration parameters from environment variables. * * @see com.amazonaws.SDKGlobalConfiguration#AWS_CSM_CLIENT_ID_ENV_VAR * @see com.amazonaws.SDKGlobalConfiguration#AWS_CSM_ENABLED_ENV_VAR * @see com.amazonaws.SDKGlobalConfiguration#AWS_CSM_PORT_ENV_VAR */ @ThreadSafe public final class EnvironmentVariableCsmConfigurationProvider implements CsmConfigurationProvider { @Override public CsmConfiguration getConfiguration() { String enabled = System.getenv(AWS_CSM_ENABLED_ENV_VAR); if (enabled == null) { throw new SdkClientException("Unable to load Client Side Monitoring configurations from" + " environment variables!"); } String host = System.getenv(AWS_CSM_HOST_ENV_VAR); host = host == null ? 
DEFAULT_AWS_CSM_HOST : host; String port = System.getenv(AWS_CSM_PORT_ENV_VAR); String clientId = System.getenv(AWS_CSM_CLIENT_ID_ENV_VAR); clientId = clientId == null ? DEFAULT_AWS_CSM_CLIENT_ID : clientId; try { int portNumber = port == null ? DEFAULT_AWS_CSM_PORT : Integer.parseInt(port); return CsmConfiguration.builder() .withEnabled(Boolean.parseBoolean(enabled)) .withHost(host) .withPort(portNumber) .withClientId(clientId) .build(); } catch (Exception e) { throw new SdkClientException("Unable to load Client Side Monitoring configurations from" + " environment variables!", e); } } }
apache-2.0
ThPerson/DogLiveShow
app/src/main/java/com/example/mypc/dogliveshow/bean/homepagetitle/DataListBottomBean.java
12582
package com.example.mypc.dogliveshow.bean.homepagetitle;

import com.google.gson.annotations.SerializedName;

import java.util.List;

/**
 * Gson model for the home-page "bottom list" JSON payload.
 *
 * Shape: {"myArrayList": [{"map": {...room fields...}}, ...]} where each
 * "map" object describes one live-show room, e.g.:
 * {"follows":0,"gamename_enum":1035,"channel_enum":2,"sourcename":"熊猫",
 *  "roomid":485118,"title":"Hello!女神上帝视角","renqi":144746,
 *  "viewers":3610918,"url":"http://www.panda.tv/485118","channel":"秀场",...}
 *
 * Plain data holder: fields map 1:1 onto JSON keys via {@code @SerializedName};
 * no behavior beyond getters/setters.
 */
public class DataListBottomBean {
    // Top-level JSON array "myArrayList"; each element wraps a single "map" object.
    @SerializedName("myArrayList")
    private List<MyArrayListBean> myArrayList;

    public List<MyArrayListBean> getMyArrayList() { return myArrayList; }

    public void setMyArrayList(List<MyArrayListBean> myArrayList) { this.myArrayList = myArrayList; }

    /** One element of "myArrayList": a thin wrapper holding the "map" room record. */
    public static class MyArrayListBean {
        @SerializedName("map")
        private MapBean map;

        public MapBean getMap() { return map; }

        public void setMap(MapBean map) { this.map = map; }

        /** The per-room record under the "map" key. Field meanings inferred from sample data — verify against the API. */
        public static class MapBean {
            @SerializedName("follows")
            private int follows;
            @SerializedName("gamename_enum")
            private int gamenameEnum;
            @SerializedName("channel_enum")
            private int channelEnum;
            // Cover image URL, e.g. http://i5.pdim.gs/.../w338/h190.jpg
            @SerializedName("rawcoverimage")
            private String rawcoverimage;
            @SerializedName("off_enum")
            private int offEnum;
            // Source platform display name, e.g. "熊猫" (Panda TV)
            @SerializedName("sourcename")
            private String sourcename;
            @SerializedName("gameurl")
            private String gameurl;
            // Numeric id delivered as a string (too large for int), e.g. "9938883881563224996"
            @SerializedName("id")
            private String id;
            @SerializedName("roomid")
            private int roomid;
            @SerializedName("sourcesite")
            private String sourcesite;
            @SerializedName("title")
            private String title;
            @SerializedName("allnum")
            private int allnum;
            @SerializedName("queryname_enum")
            private int querynameEnum;
            @SerializedName("invalid")
            private int invalid;
            @SerializedName("nocopyright")
            private int nocopyright;
            @SerializedName("name")
            private String name;
            @SerializedName("topsort")
            private String topsort;
            // 0/1 flags delivered as ints (usehtml5, fixrank, html5_allowpopup, html5_autoplay)
            @SerializedName("usehtml5")
            private int usehtml5;
            @SerializedName("fixrank")
            private int fixrank;
            @SerializedName("sogoupv")
            private int sogoupv;
            @SerializedName("gamecoverimage_width")
            private int gamecoverimageWidth;
            @SerializedName("html5_allowpopup")
            private int html5Allowpopup;
            @SerializedName("commentator")
            private String commentator;
            // Popularity counters, e.g. renqi=144746, viewers=3610918
            @SerializedName("renqi")
            private int renqi;
            @SerializedName("viewers")
            private int viewers;
            @SerializedName("html5_autoplay")
            private int html5Autoplay;
            @SerializedName("url")
            private String url;
            @SerializedName("rawcommentatorimage")
            private String rawcommentatorimage;
            @SerializedName("gamecoverimage_height")
            private int gamecoverimageHeight;
            @SerializedName("html5_url")
            private String html5Url;
            // double: sample data shows fractional values (e.g. 1.0)
            @SerializedName("finalrank")
            private double finalrank;
            // e.g. "live"
            @SerializedName("infotype")
            private String infotype;
            @SerializedName("sourcename_enum")
            private int sourcenameEnum;
            // e.g. "秀场"
            @SerializedName("channel")
            private String channel;

            public int getFollows() { return follows; }

            public void setFollows(int follows) { this.follows = follows; }

            public int getGamenameEnum() { return gamenameEnum; }

            public void setGamenameEnum(int gamenameEnum) { this.gamenameEnum = gamenameEnum; }

            public int getChannelEnum() { return channelEnum; }

            public void setChannelEnum(int channelEnum) { this.channelEnum = channelEnum; }

            public String getRawcoverimage() { return rawcoverimage; }

            public void setRawcoverimage(String rawcoverimage) { this.rawcoverimage = rawcoverimage; }

            public int getOffEnum() { return offEnum; }

            public void setOffEnum(int offEnum) { this.offEnum = offEnum; }

            public String getSourcename() { return sourcename; }

            public void setSourcename(String sourcename) { this.sourcename = sourcename; }

            public String getGameurl() { return gameurl; }

            public void setGameurl(String gameurl) { this.gameurl = gameurl; }

            public String getId() { return id; }

            public void setId(String id) { this.id = id; }

            public int getRoomid() { return roomid; }

            public void setRoomid(int roomid) { this.roomid = roomid; }

            public String getSourcesite() { return sourcesite; }

            public void setSourcesite(String sourcesite) { this.sourcesite = sourcesite; }

            public String getTitle() { return title; }

            public void setTitle(String title) { this.title = title; }

            public int getAllnum() { return allnum; }

            public void setAllnum(int allnum) { this.allnum = allnum; }

            public int getQuerynameEnum() { return querynameEnum; }

            public void setQuerynameEnum(int querynameEnum) { this.querynameEnum = querynameEnum; }

            public int getInvalid() { return invalid; }

            public void setInvalid(int invalid) { this.invalid = invalid; }

            public int getNocopyright() { return nocopyright; }

            public void setNocopyright(int nocopyright) { this.nocopyright = nocopyright; }

            public String getName() { return name; }

            public void setName(String name) { this.name = name; }

            public String getTopsort() { return topsort; }

            public void setTopsort(String topsort) { this.topsort = topsort; }

            public int getUsehtml5() { return usehtml5; }

            public void setUsehtml5(int usehtml5) { this.usehtml5 = usehtml5; }

            public int getFixrank() { return fixrank; }

            public void setFixrank(int fixrank) { this.fixrank = fixrank; }

            public int getSogoupv() { return sogoupv; }

            public void setSogoupv(int sogoupv) { this.sogoupv = sogoupv; }

            public int getGamecoverimageWidth() { return gamecoverimageWidth; }

            public void setGamecoverimageWidth(int gamecoverimageWidth) { this.gamecoverimageWidth = gamecoverimageWidth; }

            public int getHtml5Allowpopup() { return html5Allowpopup; }

            public void setHtml5Allowpopup(int html5Allowpopup) { this.html5Allowpopup = html5Allowpopup; }

            public String getCommentator() { return commentator; }

            public void setCommentator(String commentator) { this.commentator = commentator; }

            public int getRenqi() { return renqi; }

            public void setRenqi(int renqi) { this.renqi = renqi; }

            public int getViewers() { return viewers; }

            public void setViewers(int viewers) { this.viewers = viewers; }

            public int getHtml5Autoplay() { return html5Autoplay; }

            public void setHtml5Autoplay(int html5Autoplay) { this.html5Autoplay = html5Autoplay; }

            public String getUrl() { return url; }

            public void setUrl(String url) { this.url = url; }

            public String getRawcommentatorimage() { return rawcommentatorimage; }

            public void setRawcommentatorimage(String rawcommentatorimage) { this.rawcommentatorimage = rawcommentatorimage; }

            public int getGamecoverimageHeight() { return gamecoverimageHeight; }

            public void setGamecoverimageHeight(int gamecoverimageHeight) { this.gamecoverimageHeight = gamecoverimageHeight; }

            public String getHtml5Url() { return html5Url; }

            public void setHtml5Url(String html5Url) { this.html5Url = html5Url; }

            public double getFinalrank() { return finalrank; }

            public void setFinalrank(double finalrank) { this.finalrank = finalrank; }

            public String getInfotype() { return infotype; }

            public void setInfotype(String infotype) { this.infotype = infotype; }

            public int getSourcenameEnum() { return sourcenameEnum; }

            public void setSourcenameEnum(int sourcenameEnum) { this.sourcenameEnum = sourcenameEnum; }

            public String getChannel() { return channel; }

            public void setChannel(String channel) { this.channel = channel; }
        }
    }
}
apache-2.0
mcharmas/android-parcelable-intellij-plugin
src/pl/charmas/parcelablegenerator/typeserializers/serializers/ShortPrimitiveSerializer.java
613
package pl.charmas.parcelablegenerator.typeserializers.serializers;

import pl.charmas.parcelablegenerator.typeserializers.SerializableValue;
import pl.charmas.parcelablegenerator.typeserializers.TypeSerializer;

/**
 * Generates Parcelable read/write statements for {@code short} fields.
 *
 * Android's {@code Parcel} has no writeShort/readShort, so the value is
 * widened to an {@code int} on write and narrowed back with a cast on read.
 */
public class ShortPrimitiveSerializer implements TypeSerializer {

    @Override
    public String writeValue(SerializableValue field, String parcel, String flags) {
        // e.g. "dest.writeInt(myShort);"
        return String.format("%s.writeInt(%s);", parcel, field.getName());
    }

    @Override
    public String readValue(SerializableValue field, String parcel) {
        // e.g. "myShort = (short) in.readInt();"
        return String.format("%s = (short) %s.readInt();", field.getName(), parcel);
    }
}
apache-2.0
thaibui/algorithms-core
leetcode/src/test/java/org/bui/algorithms/leetcode/WordLadderIITest.java
2069
/*
 * Copyright 2016. Thai Bui
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bui.algorithms.leetcode;

import org.hamcrest.Matchers;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

import static org.bui.algorithms.leetcode.WordLadderII.findLadders;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;

/**
 * Tests for {@link WordLadderII#findLadders}: finding all shortest
 * transformation sequences from a begin word to an end word, changing one
 * letter at a time through dictionary words.
 */
public class WordLadderIITest {

    @Test
    public void testNoShortestPath() {
        // "hot" -> "dog" needs intermediate words ("dot"/"hog") that are not
        // in the dictionary, so no ladder exists and the result is empty.
        List<List<String>> solution = findLadders("hot", "dog", new HashSet<>(Arrays.asList("hot", "dog")));
        assertTrue("Result should be empty", solution.isEmpty());
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testMultipleSolutions() {
        // Classic LeetCode example: two distinct shortest ladders of the same
        // length must both be returned, in any order.
        List<List<String>> solution1 = findLadders("hit", "cog", new HashSet<>(Arrays.asList("hot","dot","dog","lot","log")));
        assertThat("Result should contain correct solutions", solution1, Matchers.containsInAnyOrder(
            Arrays.asList("hit", "hot", "lot", "log", "cog"),
            Arrays.asList("hit", "hot", "dot", "dog", "cog")
        ));
    }

    @SuppressWarnings("unchecked")
    @Test
    public void testMultipleSolutionsSpecialCase() {
        // End word "coh" is reachable only through "cog"; both shortest
        // ladders share that final hop.
        List<List<String>> solution1 = findLadders("hit", "coh", new HashSet<>(Arrays.asList("hot","dot","dog","lot","log", "cog")));
        assertThat("Result should contain correct solutions", solution1, Matchers.containsInAnyOrder(
            Arrays.asList("hit", "hot", "lot", "log", "cog", "coh"),
            Arrays.asList("hit", "hot", "dot", "dog", "cog", "coh")
        ));
    }
}
apache-2.0
ynhenggg/XBase
XBase/src/main/java/ynheng/x/base/rx/RxLifeManager.java
3310
package ynheng.x.base.rx;

import android.util.SparseArray;

import io.reactivex.Observable;
import io.reactivex.ObservableSource;
import io.reactivex.ObservableTransformer;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Action;
import io.reactivex.functions.Consumer;
import ynheng.x.util.ObjectUtils;

/**
 * Created by ynheng on 2017/4/12.
 *
 * Utility that ties RxJava stream subscriptions to a host's lifecycle:
 * {@link #bind} registers each subscription's {@link Disposable} into a
 * per-host {@link CompositeDisposable}, and {@link #unBind} disposes all of a
 * host's outstanding subscriptions (call it from the host's teardown, e.g. a
 * presenter's detach()).
 *
 * NOTE(review): access to the static {@link SparseArray} is not synchronized —
 * assumes all calls happen on one thread (presumably the main thread); confirm.
 */
public class RxLifeManager {
    // Utility class: not instantiable.
    private RxLifeManager() {
        throw new UnsupportedOperationException("can't instantiate a Utility class.");
    }

    // One CompositeDisposable per host, keyed by the host's hash (see checkHash()).
    private static SparseArray<CompositeDisposable> compositeDisposables = new SparseArray<>();

    /**
     * Binds an Rx stream to a host's lifecycle and manages its disposable.
     *
     * The disposable is added on doOnSubscribe() and removed again on
     * doOnError() or doOnComplete().
     *
     * NOTE(review): the {@code streamType} parameter is never read — it only
     * pins the transformer's type parameter at the call site.
     *
     * @param streamType element type of the stream (used for type inference only)
     * @param host       lifecycle key; pass the presenter (or its hashCode()) —
     *                   the same value must later be passed to {@link #unBind}
     * @return transformer to use as the argument of {@code .compose(...)}
     */
    public static <T> ObservableTransformer<T, T> bind(Class<T> streamType, Object host) {
        int hash = checkHash(host);
        CompositeDisposable compositeDisposable = compositeDisposables.get(hash);
        if (ObjectUtils.isNull(compositeDisposable)) {
            // First stream bound for this host: lazily create its container.
            compositeDisposable = new CompositeDisposable();
            compositeDisposables.put(hash, compositeDisposable);
        }
        return handDisposable(compositeDisposable);
    }

    /**
     * Cancels every Rx stream previously bound for this host; call from the
     * host's teardown (e.g. presenter detach()).
     *
     * @param object same lifecycle key (or its hashCode()) that was passed to {@link #bind}
     */
    public static void unBind(Object object) {
        int hash = checkHash(object);
        CompositeDisposable compositeDisposable = compositeDisposables.get(hash);
        if (ObjectUtils.nonNull(compositeDisposable)) {
            if (!compositeDisposable.isDisposed()) {
                compositeDisposable.dispose();
            }
            // Drop the entry so the host's container can be garbage-collected.
            compositeDisposables.remove(hash);
        }
    }

    /**
     * If the argument is an Integer it is treated as the hash itself;
     * otherwise the object's hashCode() is used.
     */
    private static int checkHash(Object object) {
        int hash;
        if (object instanceof Integer) {
            hash = (int) object;
        } else {
            hash = object.hashCode();
        }
        return hash;
    }

    /**
     * Builds the transformer that registers/unregisters the stream's
     * disposable: added on doOnSubscribe(), removed on doOnError() or
     * doOnComplete().
     *
     * @param disposables the host's CompositeDisposable collecting its disposables
     */
    private static <T> ObservableTransformer<T, T> handDisposable(final CompositeDisposable disposables) {
        return new ObservableTransformer<T, T>() {
            @Override
            public ObservableSource<T> apply(Observable<T> upstream) {
                // One-element array so the inner callbacks can share the mutable reference.
                final Disposable[] outDisposable = new Disposable[1];
                return upstream.doOnSubscribe(new Consumer<Disposable>() {
                    @Override
                    public void accept(Disposable disposable) throws Exception {
                        outDisposable[0] = disposable;
                        disposables.add(outDisposable[0]);
                    }
                }).doOnError(new Consumer<Throwable>() {
                    @Override
                    public void accept(Throwable throwable) throws Exception {
                        disposables.remove(outDisposable[0]);
                    }
                }).doOnComplete(new Action() {
                    @Override
                    public void run() throws Exception {
                        disposables.remove(outDisposable[0]);
                    }
                });
            }
        };
    }
}
apache-2.0
MICommunity/psi-jami
jami-mitab/src/main/java/psidev/psi/mi/jami/tab/extension/DefaultMitabFeature.java
2472
package psidev.psi.mi.jami.tab.extension;

import psidev.psi.mi.jami.datasource.FileSourceContext;
import psidev.psi.mi.jami.datasource.FileSourceLocator;
import psidev.psi.mi.jami.model.CvTerm;
import psidev.psi.mi.jami.model.Entity;
import psidev.psi.mi.jami.model.Feature;
import psidev.psi.mi.jami.model.impl.DefaultFeature;

/**
 * A DefaultMitabFeature is a feature in MITAB with some free text and an
 * optional location in the source file (via {@link FileSourceContext}).
 *
 * It can be a ModelledFeature or a FeatureEvidence.
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>07/06/13</pre>
 */
public class DefaultMitabFeature extends DefaultFeature implements MitabFeature<Entity, Feature>, FileSourceContext{

    // Free-text annotation attached to the feature in the MITAB column.
    private String text;
    // Where in the MITAB file this feature was parsed from; may be null.
    private FileSourceLocator sourceLocator;

    /**
     * Creates an empty feature with no type, text or source location.
     */
    public DefaultMitabFeature() {
        super();
    }

    /**
     * Creates a feature of the given type.
     *
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     */
    public DefaultMitabFeature(CvTerm type) {
        super(type);
    }

    /**
     * Creates a feature whose type is built from the given free-text name.
     *
     * @param type a {@link java.lang.String} object.
     */
    public DefaultMitabFeature(String type) {
        super(new MitabCvTerm(type));
    }

    /**
     * Creates a feature of the given type with an identifier string.
     *
     * NOTE(review): the second argument is named "interpro" but is forwarded
     * verbatim to the DefaultFeature(CvTerm, String) constructor — confirm
     * against DefaultFeature which property it actually populates.
     *
     * @param type a {@link psidev.psi.mi.jami.model.CvTerm} object.
     * @param interpro a {@link java.lang.String} object.
     */
    public DefaultMitabFeature(CvTerm type, String interpro) {
        super(type, interpro);
    }

    /**
     * Returns the free-text annotation of this feature.
     *
     * @return a {@link java.lang.String} object, or null if none was set.
     */
    public String getText() {
        return text;
    }

    /** {@inheritDoc} */
    public void setText(String text) {
        this.text = text;
    }

    /**
     * Returns the location of this feature in the source MITAB file.
     *
     * @return a {@link psidev.psi.mi.jami.datasource.FileSourceLocator} object, or null.
     */
    public FileSourceLocator getSourceLocator() {
        return sourceLocator;
    }

    /** {@inheritDoc} */
    public void setSourceLocator(FileSourceLocator sourceLocator) {
        this.sourceLocator = sourceLocator;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        // Prefer the file location when available; otherwise fall back to the default rendering.
        return (getSourceLocator() != null ? "Feature: "+getSourceLocator().toString():super.toString());
    }
}
apache-2.0
juhasipo/JMobster
core/src/test/java/fi/vincit/jmobster/util/test/TestUtil.java
7298
package fi.vincit.jmobster.util.test;/* * Copyright 2012 Juha Siponen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright 2012-2013 Juha Siponen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import fi.vincit.jmobster.processor.model.FieldAnnotation; import fi.vincit.jmobster.processor.model.ModelField; import fi.vincit.jmobster.util.collection.AnnotationBag; import org.junit.Assert; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; /** * General utility methods for unit and integration tests. */ public class TestUtil { /** * Assert that the given fields list contains the field with the given name. * On assertion failure index isn't naturally returned. 
* @param modelFields Model to check * @param fieldName Field name * @return Index in which the found field is. */ public static int assertFieldFoundOnce( List<ModelField> modelFields, String fieldName ) { int foundIndex = -1; for( int i = 0; i < modelFields.size(); ++i ) { final ModelField field = modelFields.get(i); if( field.getName().equals(fieldName) ) { if( foundIndex >= 0 ) { Assert.assertTrue("Field <" + fieldName + "> found more than once.", false); } foundIndex = i; } } if( foundIndex < 0 ) { Assert.assertTrue("Field with name <" + fieldName + "> not found.", false); return -1; // Never returned due to assertion } else { return foundIndex; } } /** * Assert that the given fields list doesn't contain the field with the given name. * On assertion failure index isn't naturally returned. * @param modelFields Model to check * @param fieldName Field name */ public static void assertFieldNotFound(List<ModelField> modelFields, String fieldName) { for( ModelField field : modelFields ) { if( field.getName().equals(fieldName) ) { Assert.assertTrue("Field with name <" + fieldName + "> found when it should be ignored.", false); } } } /** * Generates a List from the given objects * @param objects Object to include in the list * @param <T> Type of list * @return List containing the given objects. Empty list if no objects given. */ public static <T>List<T> listFromObjects(T... objects) { List<T> arrayList = new ArrayList<T>(objects.length); Collections.addAll(arrayList, objects); return arrayList; } /** * Generates a List from the given objects * @param objects Object to include in the list * @param <T> Type of list * @return List containing the given objects. Empty list if no objects given. */ public static <T>Collection<T> collectionFromObjects(T... objects) { Collection<T> arrayList = new ArrayList<T>(objects.length); Collections.addAll(arrayList, objects); return arrayList; } /** * Generates a Model field with the given annotations. 
Model field * default value will be "Test value" and the type of the model field * will be int.class. * @param validators Annotations * @return Model field with given annotations */ public static ModelField getField(List<FieldAnnotation> validators) { class T {int field;} ModelField field = new ModelField(T.class.getDeclaredFields()[0], validators); return field; } /** * Adds quotation marks around the given string * @param stringToQuote String to quote * @return Given string with quotation marks around it */ public static String quoteString(String stringToQuote) { return "\"" + stringToQuote + "\""; } /** * Creates a new annotation bag with given annotations. If none given, * annotation bag will be empty. * @param annotations Annotations * @return Annotation bag with annotations */ public static AnnotationBag generateAnnotationBag( Annotation... annotations ) { AnnotationBag annotationBag = new AnnotationBag(); for( Annotation annotation : annotations ) { annotationBag.addAnnotation( new FieldAnnotation( annotation ) ); } return annotationBag; } /** * Returns the annotations from the nth field of given class. Throws exceptions * if the indices are not in range. * @param clazz Class * @param fieldIndex Index of the field * @return Annotation */ public static Annotation[] getAnnotationsFromClassField(Class clazz, int fieldIndex) { return getFieldFromClass(clazz, fieldIndex).getDeclaredAnnotations(); } /** * Returns the nth annotation from the mth field of given class. Throws exceptions * if the indices are not in range. 
* @param clazz Class * @param fieldIndex Index of the field * @param annotationIndex Index of the annotation in finalIndex field * @return Annotation */ public static Annotation getAnnotationFromClass(Class clazz, int fieldIndex, int annotationIndex) { return getAnnotationsFromClassField(clazz, fieldIndex)[annotationIndex]; } public static Field getFieldFromClass(Class clazz, int fieldIndex) { return clazz.getDeclaredFields()[fieldIndex]; } public static PropertyDescriptor getPropertyFromClass(Class clazz, String propertyName) { try { final BeanInfo beanInfo = Introspector.getBeanInfo( clazz ); for( PropertyDescriptor property : beanInfo.getPropertyDescriptors() ) { if( propertyName.equals(property.getName()) ) { return property; } } throw new RuntimeException("No property with name " + propertyName + " + found"); } catch( IntrospectionException e ) { throw new RuntimeException(e); } } }
apache-2.0
Nodstuff/hapi-fhir
hapi-fhir-structures-hl7org-dstu2/src/main/java/org/hl7/fhir/instance/model/valuesets/FmConditions.java
2815
package org.hl7.fhir.instance.model.valuesets; /* Copyright (c) 2011+, HL7, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ // Generated on Sat, Aug 22, 2015 23:00-0400 for FHIR v0.5.0 public enum FmConditions { /** * Headache */ _123987, /** * added to help the parsers */ NULL; public static FmConditions fromCode(String codeString) throws Exception { if (codeString == null || "".equals(codeString)) return null; if ("123987".equals(codeString)) return _123987; throw new Exception("Unknown FmConditions code '"+codeString+"'"); } public String toCode() { switch (this) { case _123987: return "123987"; default: return "?"; } } public String getSystem() { return "http://hl7.org/fhir/fm-conditions"; } public String getDefinition() { switch (this) { case _123987: return "Headache"; default: return "?"; } } public String getDisplay() { switch (this) { case _123987: return "Headache"; default: return "?"; } } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-codecommit/src/main/java/com/amazonaws/services/codecommit/model/transform/GetBranchRequestMarshaller.java
2304
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.codecommit.model.transform;

import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.codecommit.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * GetBranchRequestMarshaller
 *
 * Code-generated singleton that maps a {@link GetBranchRequest}'s fields onto
 * the JSON payload keys "repositoryName" and "branchName". Do not hand-edit —
 * regenerated by the AWS SDK code generator.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class GetBranchRequestMarshaller {

    // Payload binding for the "repositoryName" JSON member.
    private static final MarshallingInfo<String> REPOSITORYNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("repositoryName").build();
    // Payload binding for the "branchName" JSON member.
    private static final MarshallingInfo<String> BRANCHNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("branchName").build();

    // Stateless, so a single shared instance suffices.
    private static final GetBranchRequestMarshaller instance = new GetBranchRequestMarshaller();

    public static GetBranchRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(GetBranchRequest getBranchRequest, ProtocolMarshaller protocolMarshaller) {
        if (getBranchRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(getBranchRequest.getRepositoryName(), REPOSITORYNAME_BINDING);
            protocolMarshaller.marshall(getBranchRequest.getBranchName(), BRANCHNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
apache-2.0
nmohamad/bridgepoint
src/org.xtuml.bp.ui.xtext.masl/src-gen/org/xtuml/bp/ui/xtext/validation/AbstractMASLValidator.java
523
/*
 * generated by Xtext 2.9.1
 */
package org.xtuml.bp.ui.xtext.validation;

import java.util.ArrayList;
import java.util.List;

import org.eclipse.emf.ecore.EPackage;
import org.eclipse.xtext.validation.AbstractDeclarativeValidator;

/**
 * Generated base class for the MASL validator: declares which EPackages this
 * validator is registered for. Hand-written checks belong in the concrete
 * subclass, not here — this file is regenerated from the grammar.
 */
public abstract class AbstractMASLValidator extends AbstractDeclarativeValidator {

	@Override
	protected List<EPackage> getEPackages() {
		List<EPackage> result = new ArrayList<EPackage>();
		// NOTE(review): registers MyDslPackage rather than a MASL-named package —
		// presumably the Xtext grammar is still named "MyDsl"; confirm against the .xtext file.
		result.add(org.xtuml.bp.ui.xtext.myDsl.MyDslPackage.eINSTANCE);
		return result;
	}
}
apache-2.0
tbrooks8/Agrona
agrona/src/test/java/org/agrona/concurrent/AgentInvokerTest.java
5353
/*
 * Copyright 2014-2018 Real Logic Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.agrona.concurrent;

import org.agrona.ErrorHandler;
import org.agrona.LangUtil;
import org.agrona.concurrent.status.AtomicCounter;
import org.junit.Test;
import org.mockito.Mockito;

import java.nio.channels.ClosedByInterruptException;

import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.*;

/**
 * Tests for {@link AgentInvoker}: verifies the start/invoke/close lifecycle
 * callbacks on the wrapped {@link Agent}, error counting/reporting, and the
 * special handling of termination and interrupt-related exceptions.
 */
public class AgentInvokerTest
{
    private final ErrorHandler mockErrorHandler = mock(ErrorHandler.class);
    private final AtomicCounter mockAtomicCounter = mock(AtomicCounter.class);
    private final Agent mockAgent = mock(Agent.class);
    private final AgentInvoker invoker = new AgentInvoker(mockErrorHandler, mockAtomicCounter, mockAgent);

    @Test
    public void shouldFollowLifecycle() throws Exception
    {
        // Each lifecycle callback fires exactly once even when the
        // corresponding invoker method is called twice; doWork runs per invoke.
        invoker.start();
        invoker.start();
        verify(mockAgent, times(1)).onStart();
        verifyNoMoreInteractions(mockAgent);

        invoker.invoke();
        invoker.invoke();
        verify(mockAgent, times(2)).doWork();
        verifyNoMoreInteractions(mockAgent);

        invoker.close();
        invoker.close();
        verify(mockAgent, times(1)).onClose();
        verifyNoMoreInteractions(mockAgent);
    }

    @Test
    public void shouldReturnAgent()
    {
        assertThat(invoker.agent(), is(mockAgent));
    }

    @Test
    public void shouldNotDoWorkOnClosedRunnerButCallOnClose() throws Exception
    {
        // Closing before starting: no work and no errors, but onClose still fires.
        invoker.close();
        invoker.invoke();

        verify(mockAgent, never()).onStart();
        verify(mockAgent, never()).doWork();
        verify(mockErrorHandler, never()).onError(any());
        verify(mockAtomicCounter, never()).increment();
        verify(mockAgent).onClose();
    }

    @Test
    public void shouldReportExceptionThrownByAgent() throws Exception
    {
        // A plain RuntimeException from doWork is reported and counted but
        // does NOT close the invoker — subsequent invokes keep working.
        final RuntimeException expectedException = new RuntimeException();
        when(mockAgent.doWork()).thenThrow(expectedException);

        invoker.start();
        invoker.invoke();

        verify(mockAgent).doWork();
        verify(mockErrorHandler).onError(expectedException);
        verify(mockAtomicCounter).increment();
        verify(mockAgent, never()).onClose();
        reset(mockAgent);

        invoker.invoke();

        verify(mockAgent).doWork();
        reset(mockAgent);

        invoker.close();

        verify(mockAgent, never()).doWork();
        verify(mockAgent).onClose();
    }

    @Test
    public void shouldReportExceptionThrownOnStart() throws Exception
    {
        // A failure in onStart is reported, skips doWork, and transitions the
        // invoker straight to closed.
        final RuntimeException expectedException = new RuntimeException();
        Mockito.doThrow(expectedException).when(mockAgent).onStart();

        invoker.start();
        invoker.invoke();

        verify(mockAgent, never()).doWork();
        verify(mockErrorHandler).onError(expectedException);
        verify(mockAgent).onClose();

        assertTrue(invoker.isStarted());
        assertFalse(invoker.isRunning());
        assertTrue(invoker.isClosed());
    }

    @Test
    public void shouldHandleAgentTerminationExceptionThrownByAgent() throws Exception
    {
        // AgentTerminationException is reported AND closes the invoker;
        // further invokes do no work.
        final RuntimeException expectedException = new AgentTerminationException();
        when(mockAgent.doWork()).thenThrow(expectedException);

        invoker.start();
        invoker.invoke();

        verify(mockAgent).doWork();
        verify(mockErrorHandler).onError(expectedException);
        verify(mockAtomicCounter).increment();
        verify(mockAgent).onClose();
        assertTrue(invoker.isClosed());
        reset(mockAgent);

        invoker.invoke();

        verify(mockAgent, never()).doWork();
        assertTrue(invoker.isClosed());
    }

    @Test
    public void shouldNotReportClosedByInterruptException() throws Exception
    {
        // ClosedByInterruptException signals shutdown, not failure: no error report.
        when(mockAgent.doWork()).thenThrow(new ClosedByInterruptException());

        assertExceptionNotReported();
    }

    @Test
    public void shouldNotReportRethrownClosedByInterruptException() throws Exception
    {
        // Same as above even when the exception is caught and rethrown
        // unchecked via LangUtil.
        when(mockAgent.doWork()).thenAnswer(
            (inv) ->
            {
                try
                {
                    throw new ClosedByInterruptException();
                }
                catch (final ClosedByInterruptException ex)
                {
                    LangUtil.rethrowUnchecked(ex);
                }

                return null;
            });

        assertExceptionNotReported();
    }

    // Runs a full lifecycle and verifies neither the error handler nor the
    // error counter saw anything.
    private void assertExceptionNotReported()
    {
        invoker.start();
        invoker.invoke();
        invoker.close();

        verify(mockErrorHandler, never()).onError(any());
        verify(mockAtomicCounter, never()).increment();
    }
}
apache-2.0
andytaylor/activemq-artemis
artemis-commons/src/test/java/org/apache/activemq/artemis/utils/Wait.java
6042
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.utils;

import java.util.Objects;
import java.util.concurrent.TimeUnit;

import org.junit.Assert;

/**
 * Polling-based waiting utility for tests: repeatedly evaluates a condition
 * until it is satisfied or a timeout elapses, then (for the assert* variants)
 * fails the test via JUnit if the condition never became true.
 *
 * Utility adapted from: org.apache.activemq.util.Wait
 */
public class Wait {

   /** Default maximum time to wait for a condition (30 seconds). */
   public static final long MAX_WAIT_MILLIS = 30 * 1000;

   /** Default pause between condition polls. */
   public static final int SLEEP_MILLIS = 100;

   public static final String DEFAULT_FAILURE_MESSAGE = "Condition wasn't met";

   /** A boolean condition polled until it becomes true. */
   public interface Condition {
      boolean isSatisfied() throws Exception;
   }

   /** A condition producing a long, polled until it equals an expected value. */
   public interface LongCondition {
      long getCount() throws Exception;
   }

   /** A condition producing an arbitrary object, polled until it equals an expected value. */
   public interface ObjectCondition {
      Object getObject() throws Exception;
   }

   /** A condition producing an int, polled until it equals an expected value. */
   public interface IntCondition {
      int getCount() throws Exception;
   }

   /**
    * Waits up to {@link #MAX_WAIT_MILLIS} for the condition to become true.
    *
    * @return true if the condition was satisfied within the default timeout
    */
   public static boolean waitFor(Condition condition) throws Exception {
      return waitFor(condition, MAX_WAIT_MILLIS);
   }

   public static void assertEquals(Object obj, ObjectCondition condition) throws Exception {
      assertEquals(obj, condition, MAX_WAIT_MILLIS, SLEEP_MILLIS);
   }

   public static void assertEquals(long size, LongCondition condition) throws Exception {
      assertEquals(size, condition, MAX_WAIT_MILLIS);
   }

   public static void assertEquals(long size, LongCondition condition, long timeout) throws Exception {
      assertEquals(size, condition, timeout, SLEEP_MILLIS);
   }

   /**
    * Waits until the condition's count equals {@code size}, failing the test
    * with both values in the message if the timeout elapses first.
    */
   public static void assertEquals(Long size, LongCondition condition, long timeout, long sleepMillis) throws Exception {
      boolean result = waitFor(() -> condition.getCount() == size, timeout, sleepMillis);

      if (!result) {
         Assert.fail(size + " != " + condition.getCount());
      }
   }

   public static void assertEquals(int size, IntCondition condition) throws Exception {
      assertEquals(size, condition, MAX_WAIT_MILLIS);
   }

   public static void assertEquals(int size, IntCondition condition, long timeout) throws Exception {
      assertEquals(size, condition, timeout, SLEEP_MILLIS);
   }

   /**
    * Waits until the condition's object equals {@code obj} (null-safe), then
    * asserts equality one final time if the timeout elapsed.
    */
   public static void assertEquals(Object obj, ObjectCondition condition, long timeout, long sleepMillis) throws Exception {
      // BUG FIX: the original predicate was (obj == condition || obj.equals(condition.getObject())),
      // which compared the expected value against the condition object itself (always false for
      // distinct types) and threw NPE when obj was null. Objects.equals handles both correctly.
      boolean result = waitFor(() -> Objects.equals(obj, condition.getObject()), timeout, sleepMillis);

      if (!result) {
         Assert.assertEquals(obj, condition.getObject());
      }
   }

   public static void assertEquals(int size, IntCondition condition, long timeout, long sleepMillis) throws Exception {
      boolean result = waitFor(() -> condition.getCount() == size, timeout, sleepMillis);

      if (!result) {
         Assert.fail(size + " != " + condition.getCount());
      }
   }

   public static void assertTrue(Condition condition) {
      assertTrue(DEFAULT_FAILURE_MESSAGE, condition);
   }

   public static void assertFalse(Condition condition) throws Exception {
      assertTrue(() -> !condition.isSatisfied());
   }

   public static void assertFalse(String failureMessage, Condition condition) {
      assertTrue(failureMessage, () -> !condition.isSatisfied());
   }

   public static void assertFalse(String failureMessage, Condition condition, final long duration) {
      assertTrue(failureMessage, () -> !condition.isSatisfied(), duration, SLEEP_MILLIS);
   }

   public static void assertFalse(Condition condition, final long duration, final long sleep) {
      assertTrue(DEFAULT_FAILURE_MESSAGE, () -> !condition.isSatisfied(), duration, sleep);
   }

   public static void assertTrue(Condition condition, final long duration) {
      assertTrue(DEFAULT_FAILURE_MESSAGE, condition, duration, SLEEP_MILLIS);
   }

   public static void assertTrue(String failureMessage, Condition condition) {
      assertTrue(failureMessage, condition, MAX_WAIT_MILLIS);
   }

   public static void assertTrue(String failureMessage, Condition condition, final long duration) {
      assertTrue(failureMessage, condition, duration, SLEEP_MILLIS);
   }

   public static void assertTrue(Condition condition, final long duration, final long sleep) throws Exception {
      assertTrue(DEFAULT_FAILURE_MESSAGE, condition, duration, sleep);
   }

   /**
    * Core assertion: waits for the condition and fails with the supplied
    * message if it is not satisfied within {@code duration} milliseconds.
    */
   public static void assertTrue(String failureMessage, Condition condition, final long duration, final long sleep) {
      boolean result = waitFor(condition, duration, sleep);

      if (!result) {
         Assert.fail(failureMessage);
      }
   }

   public static boolean waitFor(final Condition condition, final long duration) throws Exception {
      return waitFor(condition, duration, SLEEP_MILLIS);
   }

   /**
    * Polls the condition every {@code sleepMillis} ms (yielding instead of
    * sleeping when sleepMillis == 0) until it is satisfied or the deadline
    * passes.
    *
    * @return the final state of the condition
    * @throws IllegalStateException wrapping any exception thrown by the condition
    */
   public static boolean waitFor(final Condition condition, final long durationMillis, final long sleepMillis) {
      try {
         final long expiry = System.currentTimeMillis() + durationMillis;
         boolean conditionSatisfied = condition.isSatisfied();
         while (!conditionSatisfied && System.currentTimeMillis() < expiry) {
            if (sleepMillis == 0) {
               Thread.yield();
            } else {
               TimeUnit.MILLISECONDS.sleep(sleepMillis);
            }
            conditionSatisfied = condition.isSatisfied();
         }
         return conditionSatisfied;
      } catch (Exception e) {
         // Preserve the original failure as the cause.
         throw new IllegalStateException(e);
      }
   }
}
apache-2.0
AlexEkitin/java_barancev
sandbox/src/test/java/ru/stqa/pft/sandbox/PrimeTests.java
503
package ru.stqa.pft.sandbox; import org.testng.Assert; import org.testng.annotations.Test; public class PrimeTests { @Test public void testPrimes() { Assert.assertTrue(Primes.isPrimeFast(Integer.MAX_VALUE)); } @Test(enabled = false) public void testPrimesLong() { long n = Integer.MAX_VALUE; Assert.assertTrue(Primes.isPrime(n)); } @Test public void testNonPrimes() { Assert.assertFalse(Primes.isPrime(Integer.MAX_VALUE-2)); } }
apache-2.0
forkunited/ARKWater
src/main/java/ark/model/annotator/nlp/AnnotatorToken.java
222
package ark.model.annotator.nlp; import ark.data.annotation.nlp.DocumentNLP; import ark.model.annotator.Annotator; public interface AnnotatorToken<T> extends Annotator<T> { T[][] annotate(DocumentNLP document); }
apache-2.0
rvansa/JGroups
src/org/jgroups/auth/SimpleToken.java
2854
package org.jgroups.auth;

import org.jgroups.Message;
import org.jgroups.annotations.Property;
import org.jgroups.util.Util;

import java.io.DataInput;
import java.io.DataOutput;

/**
 * <p>
 * This is an example of using a preshared token for authentication purposes. All members of the
 * group have to have the same string value in the JGroups config.
 * </p>
 * <p>
 * JGroups config parameters:
 * </p>
 * <ul>
 * <li>auth_value (required) = the string to encrypt</li>
 * </ul>
 *
 * @author Chris Mills
 * @see org.jgroups.auth.AuthToken
 */
public class SimpleToken extends AuthToken {
    @Property
    private String auth_value = null;

    public SimpleToken() {
        // need an empty constructor
    }

    public SimpleToken(String authvalue) {
        this.auth_value = authvalue;
    }

    public String getName() {
        return "org.jgroups.auth.SimpleToken";
    }

    public String getAuthValue() {
        return auth_value;
    }

    public void setAuthValue(String auth_value) {
        this.auth_value = auth_value;
    }

    /**
     * Accepts the peer when it presents a SimpleToken whose auth_value matches
     * ours (case-insensitively); rejects null tokens, tokens of any other type,
     * and mismatched values.
     */
    public boolean authenticate(AuthToken token, Message msg) {
        // instanceof is false for null, so no separate null check is needed.
        if (!(token instanceof SimpleToken)) {
            if (log.isWarnEnabled()) {
                log.warn("Invalid AuthToken instance - wrong type or null");
            }
            return false;
        }

        SimpleToken serverToken = (SimpleToken) token;
        // NOTE(review): equalsIgnoreCase is not a constant-time comparison, so this
        // is in principle open to timing analysis; kept as-is since this class is
        // documented as an example, not a hardened authenticator.
        if ((this.auth_value != null) && (serverToken.auth_value != null)
                && (this.auth_value.equalsIgnoreCase(serverToken.auth_value))) {
            if (log.isDebugEnabled()) {
                log.debug("SimpleToken match");
            }
            return true;
        }
        // Value mismatch (or one side unset): reject silently, matching the
        // original behavior which had its warn log commented out.
        return false;
    }

    /**
     * Required to serialize the object to pass across the wire.
     *
     * @param out stream to write the token value to
     * @throws Exception on serialization failure
     */
    public void writeTo(DataOutput out) throws Exception {
        if (log.isDebugEnabled()) {
            log.debug("SimpleToken writeTo()");
        }
        Util.writeString(this.auth_value, out);
    }

    /**
     * Required to deserialize the object when read in from the wire.
     *
     * @param in stream to read the token value from
     * @throws Exception on deserialization failure
     */
    public void readFrom(DataInput in) throws Exception {
        if (log.isDebugEnabled()) {
            log.debug("SimpleToken readFrom()");
        }
        this.auth_value = Util.readString(in);
    }

    /** Serialized size of this token in bytes. */
    public int size() {
        return Util.size(auth_value);
    }

    public String toString() {
        return "auth_value=" + auth_value;
    }
}
apache-2.0
WorkerFramework/worker-framework
util-rabbitmq/src/test/java/com/hpe/caf/util/rabbitmq/DefaultRabbitConsumerTest.java
5621
/*
 * Copyright 2015-2021 Micro Focus or one of its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.hpe.caf.util.rabbitmq;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
import org.testng.internal.junit.ArrayAsserts;
import com.rabbitmq.client.Envelope;

/**
 * Verifies that DefaultRabbitConsumer dispatches each event type (deliver,
 * ack, reject, drop) to the wrapped QueueConsumer, and stops dispatching
 * after shutdown.
 */
public class DefaultRabbitConsumerTest
{
    private static final int TEST_TIMEOUT_MS = 5000;
    private static final long TEST_TAG = 100L;

    /**
     * Bundles the collaborators every test needs: the event queue, a
     * one-shot latch, the recording consumer impl and the running
     * DefaultRabbitConsumer thread. Extracted because the original tests
     * duplicated this setup five times.
     */
    private static final class ConsumerFixture
    {
        final BlockingQueue<Event<QueueConsumer>> events = new LinkedBlockingQueue<>();
        final CountDownLatch latch = new CountDownLatch(1);
        final TestQueueConsumerImpl impl = new TestQueueConsumerImpl(latch);
        final DefaultRabbitConsumer consumer = new DefaultRabbitConsumer(events, impl);

        private ConsumerFixture()
        {
            new Thread(consumer).start();
        }
    }

    @Test
    public void testProcessDelivery() throws InterruptedException, IOException
    {
        ConsumerFixture f = new ConsumerFixture();
        try {
            byte[] body = "data".getBytes(StandardCharsets.UTF_8);
            Envelope env = Mockito.mock(Envelope.class);
            Mockito.when(env.getDeliveryTag()).thenReturn(TEST_TAG);
            f.events.offer(new ConsumerDeliverEvent(new Delivery(env, body)));
            Assert.assertTrue(f.latch.await(TEST_TIMEOUT_MS, TimeUnit.MILLISECONDS));
            ArrayAsserts.assertArrayEquals(body, f.impl.getLastDelivery().getMessageData());
        } finally {
            // Stop the consumer thread so tests do not leak threads.
            f.consumer.shutdown();
        }
    }

    @Test
    public void testProcessAck() throws InterruptedException
    {
        assertTagEventProcessed(new ConsumerAckEvent(TEST_TAG));
    }

    @Test
    public void testProcessReject() throws InterruptedException
    {
        assertTagEventProcessed(new ConsumerRejectEvent(TEST_TAG));
    }

    @Test
    public void testProcessDrop() throws InterruptedException
    {
        assertTagEventProcessed(new ConsumerDropEvent(TEST_TAG));
    }

    @Test
    public void testHandleShutdown() throws InterruptedException
    {
        ConsumerFixture f = new ConsumerFixture();
        // Shut down first: an event offered afterwards must never reach the impl,
        // so the latch is expected to time out.
        f.consumer.shutdown();
        f.events.offer(new ConsumerDropEvent(TEST_TAG));
        Assert.assertFalse(f.latch.await(TEST_TIMEOUT_MS, TimeUnit.MILLISECONDS));
    }

    /**
     * Shared body for the ack/reject/drop tests: offers the event, waits for
     * the impl to see it and checks the recorded tag.
     */
    private static void assertTagEventProcessed(final Event<QueueConsumer> event) throws InterruptedException
    {
        ConsumerFixture f = new ConsumerFixture();
        try {
            f.events.offer(event);
            Assert.assertTrue(f.latch.await(TEST_TIMEOUT_MS, TimeUnit.MILLISECONDS));
            Assert.assertEquals(TEST_TAG, f.impl.getLastTag());
        } finally {
            f.consumer.shutdown();
        }
    }

    /**
     * QueueConsumer stub that records the last delivery/tag it saw and counts
     * down the latch on every callback.
     */
    private static class TestQueueConsumerImpl implements QueueConsumer
    {
        private final CountDownLatch latch;
        private Delivery lastDelivery;
        private long lastTag;

        public TestQueueConsumerImpl(final CountDownLatch latch)
        {
            this.latch = Objects.requireNonNull(latch);
        }

        @Override
        public void processDelivery(Delivery delivery)
        {
            lastDelivery = delivery;
            latch.countDown();
        }

        @Override
        public void processAck(long tag)
        {
            lastTag = tag;
            latch.countDown();
        }

        @Override
        public void processReject(long tag)
        {
            lastTag = tag;
            latch.countDown();
        }

        @Override
        public void processDrop(long tag)
        {
            lastTag = tag;
            latch.countDown();
        }

        public Delivery getLastDelivery()
        {
            return lastDelivery;
        }

        public long getLastTag()
        {
            return lastTag;
        }
    }
}
apache-2.0
programmerr47/vk-mobile-challenge
vksdk_library/src/main/java/com/vk/sdk/api/model/special/VkGroupType.java
892
package com.vk.sdk.api.model.special; /** * @author Michael Spitsin * @since 2016-01-10 */ public enum VkGroupType { GROUP(0, "group"), PAGE(1,"page"), EVENT(2, "event"); private int id; private String name; VkGroupType(int id, String name) { this.id = id; this.name = name; } public int getId() { return id; } public String getName() { return name; } public static VkGroupType fromName(String name) { for (VkGroupType type : VkGroupType.values()) { if (type.name.equals(name)) { return type; } } return GROUP; } public static VkGroupType fromId(int id) { for (VkGroupType type : VkGroupType.values()) { if (type.id == id) { return type; } } return GROUP; } }
apache-2.0
Silverpop/mobile-connector-sdk-android
engage/src/main/java/com/silverpop/engage/recipient/CheckIdentityHandler.java
345
package com.silverpop.engage.recipient; import com.silverpop.engage.Handler; /** * Created by Lindsay Thurmond on 1/6/15. */ public interface CheckIdentityHandler extends Handler<CheckIdentityResult, CheckIdentityFailure> { public void onSuccess(CheckIdentityResult result); public void onFailure(CheckIdentityFailure failure); }
apache-2.0
condorcraft110/ManipulativeTechnomancy
src/main/java/net/cc110/mtech/ItemCore.java
836
package net.cc110.mtech; import java.util.*; import net.minecraft.item.*; import net.minecraft.creativetab.*; import net.minecraft.entity.player.*; public class ItemCore extends Item { public ItemCore() { super(); setHasSubtypes(true); setMaxStackSize(1); } public void addInformation(ItemStack stack, EntityPlayer player, List list, boolean b) { list.add(PowerRegistry.getLocalFocusName(NBTHelper.getStackString(stack, "FocusID"))); } public void getSubItems(Item item, CreativeTabs tab, List list) { Iterator<String> iterator = PowerRegistry.idIterator(); while(iterator.hasNext()) { String focusID = iterator.next(); ItemStack stack = new ItemStack(item, 1, 0); NBTHelper.setStackString(stack, "FocusID", focusID); list.add(stack); } } }
apache-2.0
oliveti/resolver
api-maven/src/main/java/org/jboss/shrinkwrap/resolver/api/maven/MavenResolverSystem.java
1411
/* * JBoss, Home of Professional Open Source * Copyright 2012, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jboss.shrinkwrap.resolver.api.maven; import org.jboss.shrinkwrap.resolver.api.Resolvers; /** * Entry point of a Maven-based Resolver system which does not support configuration. To create a new instance, pass in * this class reference to {@link Resolvers#use(Class)} or {@link Resolvers#use(Class, ClassLoader)}. * * @author <a href="mailto:alr@jboss.org">Andrew Lee Rubinger</a> * @author <a href="mailto:kpiwko@redhat.com">Karel Piwko</a> */ public interface MavenResolverSystem extends MavenResolverSystemBase<PomEquippedResolveStage, PomlessResolveStage, MavenStrategyStage, MavenFormatStage> { }
apache-2.0
michaelmarconi/oncue
oncue-common/src/main/java/oncue/common/messages/RerunJob.java
1313
/******************************************************************************* * Copyright 2013 Michael Marconi * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package oncue.common.messages; import java.io.Serializable; /** * This message is sent to the scheduler in order to enqueue another run of an * existing job. */ public class RerunJob implements Serializable { private static final long serialVersionUID = -5312109301345363059L; private long id; public RerunJob() { } public RerunJob(long id) { this.setId(id); } public long getId() { return id; } public void setId(long id) { this.id = id; } @Override public String toString() { return String.format("Re-run job %s", id); } }
apache-2.0
quarkusio/quarkus
integration-tests/rest-client-reactive-stork/src/test/java/io/quarkus/it/rest/reactive/stork/RestClientReactiveStorkTest.java
1909
package io.quarkus.it.rest.reactive.stork; import static io.quarkus.it.rest.reactive.stork.FastWiremockServer.FAST_RESPONSE; import static io.quarkus.it.rest.reactive.stork.SlowWiremockServer.SLOW_RESPONSE; import static io.restassured.RestAssured.when; import static org.assertj.core.api.Assertions.assertThat; import java.util.HashSet; import java.util.Set; import org.junit.jupiter.api.Test; import io.quarkus.arc.Arc; import io.quarkus.it.rest.client.reactive.stork.MyServiceDiscoveryProvider; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.junit.DisabledOnNativeImage; import io.quarkus.test.junit.QuarkusTest; import io.restassured.response.Response; import io.vertx.core.Vertx; @QuarkusTest @QuarkusTestResource(SlowWiremockServer.class) @QuarkusTestResource(FastWiremockServer.class) public class RestClientReactiveStorkTest { @Test @DisabledOnNativeImage void shouldUseQuarkusVertxInstance() { Vertx providedVertx = MyServiceDiscoveryProvider.providedVertx; assertThat(providedVertx).isNotNull() .isEqualTo(Arc.container().instance(Vertx.class).get()); } @Test void shouldUseFasterService() { Set<String> responses = new HashSet<>(); for (int i = 0; i < 2; i++) { Response response = when().get("/client"); response.then().statusCode(200); responses.add(response.asString()); } assertThat(responses).contains(FAST_RESPONSE, SLOW_RESPONSE); responses.clear(); for (int i = 0; i < 3; i++) { Response response = when().get("/client"); response.then().statusCode(200); responses.add(response.asString()); } // after hitting the slow endpoint, we should only use the fast one: assertThat(responses).containsOnly(FAST_RESPONSE, FAST_RESPONSE, FAST_RESPONSE); } }
apache-2.0
ashigeru/asakusafw-compiler
vanilla/runtime/core/src/test/java/com/asakusafw/vanilla/core/io/ByteBufferReaderTest.java
1556
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.vanilla.core.io; import static com.asakusafw.vanilla.core.testing.BufferTestUtil.*; import static org.hamcrest.Matchers.*; import static org.junit.Assert.*; import java.nio.ByteBuffer; import org.junit.Test; /** * Test for {@link ByteBufferReader}. */ public class ByteBufferReaderTest { /** * simple case. * @throws Exception if failed */ @Test public void simple() throws Exception { try (DataReader reader = new ByteBufferReader(buffer("Hello, world!"))) { assertThat(read(reader), is("Hello, world!")); } } /** * w/ direct buffer. * @throws Exception if failed */ @Test public void direct() throws Exception { try (DataReader reader = new ByteBufferReader(buffer("Hello, world!"))) { ByteBuffer direct = reader.getBuffer(); assertThat(direct, is(buffer("Hello, world!"))); } } }
apache-2.0
clemble/backend-common
template/src/main/java/com/clemble/casino/server/template/MustacheTemplateService.java
1478
package com.clemble.casino.server.template; import com.google.common.cache.CacheBuilder; import com.google.common.cache.CacheLoader; import com.google.common.cache.LoadingCache; import com.samskivert.mustache.Mustache; import com.samskivert.mustache.Template; import org.springframework.core.io.ClassPathResource; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Map; import java.util.concurrent.ExecutionException; /** * Created by mavarazy on 12/30/14. */ public class MustacheTemplateService implements TemplateService { final private LoadingCache<String, Template> CACHE = CacheBuilder.newBuilder().build(new CacheLoader<String, Template>(){ @Override public Template load(String key) throws Exception { ClassPathResource tmplResource = new ClassPathResource("/templates/" + key + ".hbs"); InputStream is = tmplResource.getInputStream(); try { return Mustache.compiler().compile(new InputStreamReader(is)); } finally { is.close(); } } }); @Override public String produce(String key, Map<String, String> params) { try { // Step 1. Fetching template Template template = CACHE.get(key); // Step 2. Execute template return template.execute(params); } catch (ExecutionException e) { throw new RuntimeException(e); } } }
apache-2.0
atmelino/JATexperimental
src/jat/application/DE405Propagator/ParameterSet/testOrbit.java
659
package jat.application.DE405Propagator.ParameterSet; import jat.application.DE405Propagator.DE405PropagatorParameters; import jat.core.ephemeris.DE405Body.body; import jat.core.ephemeris.DE405Frame.frame; import jat.core.spacetime.TimeAPL; public class testOrbit extends DE405PropagatorParameters{ public testOrbit() { super(); bodyGravOnOff[body.SUN.ordinal()] = true; // bodyGravOnOff[body.MARS.ordinal()] = true; // bodyGravOnOff[body.JUPITER.ordinal()] = true; Frame=frame.HEE; // Frame=frame.ICRF; simulationDate = new TimeAPL(2003, 3, 1, 12, 0, 0); y0[0] = 150.; y0[1] = 0; y0[2] = 0; y0[4] = 40; tf = 10.; } }
apache-2.0
adamjshook/accumulo
examples/simple/src/test/java/org/apache/accumulo/examples/simple/filedata/ChunkInputFormatTest.java
11378
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.accumulo.examples.simple.filedata;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;

import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Mutation;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.security.ColumnVisibility;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests ChunkInputFormat by running small local MapReduce jobs against a
 * MockInstance. Assertions made inside mapper tasks cannot fail the JUnit
 * test directly (they run in the job, not the test thread), so failures are
 * captured into the static fields e0..e3 and checked after the job completes.
 */
public class ChunkInputFormatTest {

  // Cross-thread error capture: set by the mapper classes below, asserted
  // null (or non-null, for the expected IOException) by the @Test methods.
  private static AssertionError e0 = null;
  private static AssertionError e1 = null;
  private static AssertionError e2 = null;
  private static IOException e3 = null;

  private static final Authorizations AUTHS = new Authorizations("A", "B", "C", "D");

  // Well-formed file entries ("refs" metadata rows plus "~chunk" data rows)...
  private static List<Entry<Key,Value>> data;
  // ...and a malformed file that has refs but no chunk rows at all.
  private static List<Entry<Key,Value>> baddata;

  @BeforeClass
  public static void setupClass() {
    System.setProperty("hadoop.tmp.dir", System.getProperty("user.dir") + "/target/hadoop-tmp");
    data = new ArrayList<>();
    // File "a": one visibility, one chunk of content plus the empty terminator chunk.
    ChunkInputStreamTest.addData(data, "a", "refs", "ida\0ext", "A&B", "ext");
    ChunkInputStreamTest.addData(data, "a", "refs", "ida\0name", "A&B", "name");
    ChunkInputStreamTest.addData(data, "a", "~chunk", 100, 0, "A&B", "asdfjkl;");
    ChunkInputStreamTest.addData(data, "a", "~chunk", 100, 1, "A&B", "");
    // File "b": the same chunk stored under multiple visibilities.
    ChunkInputStreamTest.addData(data, "b", "refs", "ida\0ext", "A&B", "ext");
    ChunkInputStreamTest.addData(data, "b", "refs", "ida\0name", "A&B", "name");
    ChunkInputStreamTest.addData(data, "b", "~chunk", 100, 0, "A&B", "qwertyuiop");
    ChunkInputStreamTest.addData(data, "b", "~chunk", 100, 0, "B&C", "qwertyuiop");
    ChunkInputStreamTest.addData(data, "b", "~chunk", 100, 1, "A&B", "");
    ChunkInputStreamTest.addData(data, "b", "~chunk", 100, 1, "B&C", "");
    ChunkInputStreamTest.addData(data, "b", "~chunk", 100, 1, "D", "");
    baddata = new ArrayList<>();
    // File "c": refs rows only — no chunk data — to exercise error handling.
    ChunkInputStreamTest.addData(baddata, "c", "refs", "ida\0ext", "A&B", "ext");
    ChunkInputStreamTest.addData(baddata, "c", "refs", "ida\0name", "A&B", "name");
  }

  // Helper: compares key and value of two Accumulo entries.
  public static void entryEquals(Entry<Key,Value> e1, Entry<Key,Value> e2) {
    assertEquals(e1.getKey(), e2.getKey());
    assertEquals(e1.getValue(), e2.getValue());
  }

  /**
   * Tool that configures and runs a one-mapper job over ChunkInputFormat;
   * the mapper class to use is passed as the fifth argument.
   */
  public static class CIFTester extends Configured implements Tool {
    /** Happy path: expects the two files "a" and "b", reading each fully and closing it. */
    public static class TestMapper extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
      int count = 0;

      @Override
      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
        byte[] b = new byte[20];
        int read;
        try {
          switch (count) {
            case 0:
              // First record: file "a" — two refs entries and content "asdfjkl;".
              assertEquals(key.size(), 2);
              entryEquals(key.get(0), data.get(0));
              entryEquals(key.get(1), data.get(1));
              assertEquals(read = value.read(b), 8);
              assertEquals(new String(b, 0, read), "asdfjkl;");
              assertEquals(read = value.read(b), -1);
              break;
            case 1:
              // Second record: file "b" — content "qwertyuiop".
              assertEquals(key.size(), 2);
              entryEquals(key.get(0), data.get(4));
              entryEquals(key.get(1), data.get(5));
              assertEquals(read = value.read(b), 10);
              assertEquals(new String(b, 0, read), "qwertyuiop");
              assertEquals(read = value.read(b), -1);
              break;
            default:
              assertTrue(false);
          }
        } catch (AssertionError e) {
          e1 = e;
        } finally {
          value.close();
        }
        count++;
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
        try {
          // Exactly two files should have been mapped.
          assertEquals(2, count);
        } catch (AssertionError e) {
          e2 = e;
        }
      }
    }

    /**
     * Reads part of a chunk stream and then advances to the next key WITHOUT
     * closing the stream first — the record reader is expected to raise an
     * IOException (captured into e3).
     */
    public static class TestNoClose extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
      int count = 0;

      @Override
      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
        byte[] b = new byte[5];
        int read;
        try {
          switch (count) {
            case 0:
              assertEquals(read = value.read(b), 5);
              assertEquals(new String(b, 0, read), "asdfj");
              break;
            default:
              assertTrue(false);
          }
        } catch (AssertionError e) {
          e1 = e;
        }
        count++;
        try {
          context.nextKeyValue();
          assertTrue(false);
        } catch (IOException ioe) {
          // Expected: cannot advance with an unclosed chunk stream.
          e3 = ioe;
        }
      }
    }

    /**
     * Maps over the chunk-less "baddata" file: reading or closing the stream
     * must fail, and those failures are recorded via the fail()/catch dance
     * into e1/e2.
     */
    public static class TestBadData extends Mapper<List<Entry<Key,Value>>,InputStream,List<Entry<Key,Value>>,InputStream> {
      @Override
      protected void map(List<Entry<Key,Value>> key, InputStream value, Context context) throws IOException, InterruptedException {
        byte[] b = new byte[20];
        try {
          assertEquals(key.size(), 2);
          entryEquals(key.get(0), baddata.get(0));
          entryEquals(key.get(1), baddata.get(1));
        } catch (AssertionError e) {
          e0 = e;
        }
        try {
          // read() must throw (no chunk data); the fail() only runs if it did not.
          assertFalse(value.read(b) > 0);
          try {
            fail();
          } catch (AssertionError e) {
            e1 = e;
          }
        } catch (Exception e) {
          // expected, ignore
        }
        try {
          // close() must throw as well.
          value.close();
          try {
            fail();
          } catch (AssertionError e) {
            e2 = e;
          }
        } catch (Exception e) {
          // expected, ignore
        }
      }
    }

    @Override
    public int run(String[] args) throws Exception {
      if (args.length != 5) {
        throw new IllegalArgumentException("Usage : " + CIFTester.class.getName() + " <instance name> <user> <pass> <table> <mapperClass>");
      }

      String instance = args[0];
      String user = args[1];
      String pass = args[2];
      String table = args[3];

      Job job = Job.getInstance(getConf());
      job.setJobName(this.getClass().getSimpleName() + "_" + System.currentTimeMillis());
      job.setJarByClass(this.getClass());

      job.setInputFormatClass(ChunkInputFormat.class);

      ChunkInputFormat.setConnectorInfo(job, user, new PasswordToken(pass));
      ChunkInputFormat.setInputTableName(job, table);
      ChunkInputFormat.setScanAuthorizations(job, AUTHS);
      ChunkInputFormat.setMockInstance(job, instance);

      // The mapper under test is selected by class name at runtime.
      @SuppressWarnings("unchecked")
      Class<? extends Mapper<?,?,?,?>> forName = (Class<? extends Mapper<?,?,?,?>>) Class.forName(args[4]);
      job.setMapperClass(forName);
      job.setMapOutputKeyClass(Key.class);
      job.setMapOutputValueClass(Value.class);
      job.setOutputFormatClass(NullOutputFormat.class);

      // Map-only job: all checking happens inside the mappers.
      job.setNumReduceTasks(0);

      job.waitForCompletion(true);

      return job.isSuccessful() ? 0 : 1;
    }

    public static int main(String... args) throws Exception {
      Configuration conf = new Configuration();
      conf.set("mapreduce.cluster.local.dir", new File(System.getProperty("user.dir"), "target/mapreduce-tmp").getAbsolutePath());
      return ToolRunner.run(conf, new CIFTester(), args);
    }
  }

  @Test
  public void test() throws Exception {
    MockInstance instance = new MockInstance("instance1");
    Connector conn = instance.getConnector("root", new PasswordToken(""));
    conn.tableOperations().create("test");
    BatchWriter bw = conn.createBatchWriter("test", new BatchWriterConfig());

    for (Entry<Key,Value> e : data) {
      Key k = e.getKey();
      Mutation m = new Mutation(k.getRow());
      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();

    // Exit code 0 = job succeeded; then confirm no mapper-side assertion tripped.
    assertEquals(0, CIFTester.main("instance1", "root", "", "test", CIFTester.TestMapper.class.getName()));
    assertNull(e1);
    assertNull(e2);
  }

  @Test
  public void testErrorOnNextWithoutClose() throws Exception {
    MockInstance instance = new MockInstance("instance2");
    Connector conn = instance.getConnector("root", new PasswordToken(""));
    conn.tableOperations().create("test");
    BatchWriter bw = conn.createBatchWriter("test", new BatchWriterConfig());

    for (Entry<Key,Value> e : data) {
      Key k = e.getKey();
      Mutation m = new Mutation(k.getRow());
      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();

    // Exit code 1 = the job failed, as intended when advancing without close;
    // e3 must hold the IOException the record reader raised.
    assertEquals(1, CIFTester.main("instance2", "root", "", "test", CIFTester.TestNoClose.class.getName()));
    assertNull(e1);
    assertNull(e2);
    assertNotNull(e3);
  }

  @Test
  public void testInfoWithoutChunks() throws Exception {
    MockInstance instance = new MockInstance("instance3");
    Connector conn = instance.getConnector("root", new PasswordToken(""));
    conn.tableOperations().create("test");
    BatchWriter bw = conn.createBatchWriter("test", new BatchWriterConfig());
    for (Entry<Key,Value> e : baddata) {
      Key k = e.getKey();
      Mutation m = new Mutation(k.getRow());
      m.put(k.getColumnFamily(), k.getColumnQualifier(), new ColumnVisibility(k.getColumnVisibility()), k.getTimestamp(), e.getValue());
      bw.addMutation(m);
    }
    bw.close();

    // The job itself completes (mapper swallows the expected errors), and none
    // of the captured assertion slots may be populated.
    assertEquals(0, CIFTester.main("instance3", "root", "", "test", CIFTester.TestBadData.class.getName()));
    assertNull(e0);
    assertNull(e1);
    assertNull(e2);
  }
}
apache-2.0
mdbardina/java_pft
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/tests/GroupDeleteTests.java
1089
package ru.stqa.pft.addressbook.tests; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import ru.stqa.pft.addressbook.model.GroupData; import ru.stqa.pft.addressbook.model.Groups; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static org.testng.Assert.assertEquals; public class GroupDeleteTests extends TestBase { @BeforeMethod public void ensureGroupPreconditions(){ if (app.db().groups().size() ==0) { app.goTo().groupPage(); app.group().create(new GroupData().withName("mashatest")); } } @Test public void testGroupDelete() { app.goTo().groupPage(); Groups before = app.db().groups(); GroupData deletedGroup = before.iterator().next(); app.group().delete(deletedGroup); assertThat(app.group().count(), equalTo(before.size() - 1)); Groups after= app.db().groups(); assertThat(after, equalTo(before.withoutAdded(deletedGroup))); verifyGroupListInUi(); } }
apache-2.0
grandeemme/v-ol3
v-ol3/src/main/java/org/vaadin/addon/vol3/client/interaction/OLSelectInteractionState.java
557
package org.vaadin.addon.vol3.client.interaction;

import com.vaadin.shared.AbstractComponentState;
import com.vaadin.shared.Connector;
import org.vaadin.addon.vol3.client.style.OLStyle;

import java.util.List;

/**
 * Shared state for the OpenLayers select interaction.
 *
 * Fields are public, following the Vaadin shared-state convention used by
 * {@link AbstractComponentState} subclasses.
 */
public class OLSelectInteractionState extends AbstractComponentState {

    /** Styles applied to features while they are selected. */
    public List<OLStyle> featureStyles;

    /**
     * Layers on which selection is enabled. When not specified, all visible
     * layers are available for selection.
     */
    public List<Connector> layers;
}
apache-2.0
Vilsol/NMSWrapper
src/main/java/me/vilsol/nmswrapper/wraps/unparsed/NMSEntitySmallFireball.java
2153
package me.vilsol.nmswrapper.wraps.unparsed; import me.vilsol.nmswrapper.*; import me.vilsol.nmswrapper.reflections.*; import me.vilsol.nmswrapper.wraps.*; @ReflectiveClass(name = "EntitySmallFireball") public class NMSEntitySmallFireball extends NMSEntityFireball { public NMSEntitySmallFireball(Object nmsObject){ super(nmsObject); } public NMSEntitySmallFireball(NMSWorld world){ super("EntitySmallFireball", new Object[]{NMSWorld.class}, new Object[]{world}); } public NMSEntitySmallFireball(NMSWorld world, NMSEntityLiving entityLiving, double d, double d1, double d2){ super("EntitySmallFireball", new Object[]{NMSWorld.class, NMSEntityLiving.class, double.class, double.class, double.class}, new Object[]{world, entityLiving, d, d1, d2}); } public NMSEntitySmallFireball(NMSWorld world, double d, double d1, double d2, double d3, double d4, double d5){ super("EntitySmallFireball", new Object[]{NMSWorld.class, double.class, double.class, double.class, double.class, double.class, double.class}, new Object[]{world, d, d1, d2, d3, d4, d5}); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntitySmallFireball#a(net.minecraft.server.v1_9_R1.MovingObjectPosition) */ @ReflectiveMethod(name = "a", types = {NMSMovingObjectPosition.class}) public void a(NMSMovingObjectPosition movingObjectPosition){ NMSWrapper.getInstance().exec(nmsObject, movingObjectPosition); } /** * TODO Find correct name * @see net.minecraft.server.v1_9_R1.EntitySmallFireball#ad() */ @ReflectiveMethod(name = "ad", types = {}) public boolean ad(){ return (boolean) NMSWrapper.getInstance().exec(nmsObject); } /** * @see net.minecraft.server.v1_9_R1.EntitySmallFireball#damageEntity(net.minecraft.server.v1_9_R1.DamageSource, float) */ @ReflectiveMethod(name = "damageEntity", types = {NMSDamageSource.class, float.class}) public boolean damageEntity(NMSDamageSource damageSource, float f){ return (boolean) NMSWrapper.getInstance().exec(nmsObject, damageSource, f); } }
apache-2.0
openphacts/Validator
ws-core/src/uk/ac/manchester/cs/datadesc/validator/ws/WsValidationConstants.java
2013
// OpenPHACTS RDF Validator,
// A tool for validating and storing RDF.
//
// Copyright 2012-2013 Christian Y. A. Brenninkmeijer
// Copyright 2012-2013 University of Manchester
// Copyright 2012-2013 OpenPhacts
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package uk.ac.manchester.cs.datadesc.validator.ws;

/**
 * String constants for the validator web-service layer: request parameter
 * names and endpoint path fragments shared between server and clients.
 *
 * This is a constants-only holder and is never meant to be instantiated.
 *
 * @author Christian
 */
public class WsValidationConstants {

    public static final String BY_RESOURCE = "byResource";
    public static final String CONTEXT = "context";
    public static final String FORMAT = "format";
    public static final String INCLUDE_WARNINGS = "includeWarnings";
    public static final String LOAD_URI = "loadUri";
    public static final String OBJECT = "object";
    public static final String PREDICATE = "predicate";
    public static final String QUERY = "query";
    public static final String RDF_FORMAT = "rdfFormat";
    public static final String RDF_DUMP = "rdfDump";
    public static final String RESOURCE = "resource";
    public static final String TEXT = "text";
    public static final String SPECIFICATION = "specification";
    public static final String STATEMENT_LIST = "statementList";
    public static final String SUBJECT = "subject";
    public static final String SPARQL = "sparql";
    public static final String TURTLE = "Turtle";
    public static final String URI = "uri";
    public static final String VALIDATE = "validate";
    public static final String VALIDATE_HOME = "validate-home";

    /** Prevents instantiation of this constants-only class. */
    private WsValidationConstants() {
    }
}
apache-2.0
jexp/idea2
java/java-impl/src/com/intellij/application/options/CodeStyleGenerationConfigurable.java
14915
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.application.options;

import com.intellij.codeInsight.daemon.DaemonCodeAnalyzer;
import com.intellij.openapi.application.ApplicationBundle;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.ui.ListUtil;

import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Arrays;
import java.util.Comparator;

/**
 * Settings page for the "Code Generation" code style options: name
 * prefixes/suffixes for fields, parameters and locals; comment placement;
 * {@code final} generation; annotation handling; and the relative order of
 * class members.
 *
 * Reads from and writes back to a shared {@link CodeStyleSettings} instance.
 */
public class CodeStyleGenerationConfigurable implements Configurable {
  // NOTE(review): the Swing fields below are never assigned in this class, so they
  // appear to be bound by an associated GUI-designer .form file — confirm before
  // renaming any of them.
  JPanel myPanel;
  private JTextField myFieldPrefixField;
  private JTextField myStaticFieldPrefixField;
  private JTextField myParameterPrefixField;
  private JTextField myLocalVariablePrefixField;
  private JTextField myFieldSuffixField;
  private JTextField myStaticFieldSuffixField;
  private JTextField myParameterSuffixField;
  private JTextField myLocalVariableSuffixField;
  private JCheckBox myCbPreferLongerNames;
  private JCheckBox myCbLineCommentAtFirstColumn;
  private JCheckBox myCbBlockCommentAtFirstColumn;
  private final MembersOrderList myMembersOrderList;
  private JScrollPane myMembersListScroll;
  private JButton myMoveUpButton;
  private JButton myMoveDownButton;
  private final CodeStyleSettings mySettings;
  private JCheckBox myCbGenerateFinalParameters;
  private JCheckBox myCbGenerateFinalLocals;
  private JCheckBox myCbUseExternalAnnotations;
  private JCheckBox myInsertOverrideAnnotationCheckBox;

  public CodeStyleGenerationConfigurable(CodeStyleSettings settings) {
    mySettings = settings;
    myMembersOrderList = new MembersOrderList();
  }

  /** Builds the page component: installs the member-order list and its move buttons. */
  public JComponent createComponent() {
    myMembersListScroll.getViewport().add(myMembersOrderList);
    myMoveUpButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        ListUtil.moveSelectedItemsUp(myMembersOrderList);
      }
    });
    myMoveDownButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        ListUtil.moveSelectedItemsDown(myMembersOrderList);
      }
    });
    return myPanel;
  }

  public void disposeUIResources() {
  }

  public String getDisplayName() {
    return ApplicationBundle.message("title.code.generation");
  }

  public String getHelpTopic() {
    return "reference.settingsdialog.IDE.globalcodestyle.codegen";
  }

  public Icon getIcon() {
    return StdFileTypes.JAVA.getIcon();
  }

  /** Loads the current values from {@link #mySettings} into the UI controls. */
  public void reset() {
    myCbPreferLongerNames.setSelected(mySettings.PREFER_LONGER_NAMES);
    myFieldPrefixField.setText(mySettings.FIELD_NAME_PREFIX);
    myStaticFieldPrefixField.setText(mySettings.STATIC_FIELD_NAME_PREFIX);
    myParameterPrefixField.setText(mySettings.PARAMETER_NAME_PREFIX);
    myLocalVariablePrefixField.setText(mySettings.LOCAL_VARIABLE_NAME_PREFIX);
    myFieldSuffixField.setText(mySettings.FIELD_NAME_SUFFIX);
    myStaticFieldSuffixField.setText(mySettings.STATIC_FIELD_NAME_SUFFIX);
    myParameterSuffixField.setText(mySettings.PARAMETER_NAME_SUFFIX);
    myLocalVariableSuffixField.setText(mySettings.LOCAL_VARIABLE_NAME_SUFFIX);
    myCbLineCommentAtFirstColumn.setSelected(mySettings.LINE_COMMENT_AT_FIRST_COLUMN);
    myCbBlockCommentAtFirstColumn.setSelected(mySettings.BLOCK_COMMENT_AT_FIRST_COLUMN);
    myCbGenerateFinalLocals.setSelected(mySettings.GENERATE_FINAL_LOCALS);
    myCbGenerateFinalParameters.setSelected(mySettings.GENERATE_FINAL_PARAMETERS);
    myMembersOrderList.reset(mySettings);
    myCbUseExternalAnnotations.setSelected(mySettings.USE_EXTERNAL_ANNOTATIONS);
    myInsertOverrideAnnotationCheckBox.setSelected(mySettings.INSERT_OVERRIDE_ANNOTATION);
  }

  /**
   * Writes the UI values back into {@link #mySettings}, then notifies the
   * daemon analyzer of every open project so highlighting picks up the change.
   */
  public void apply() {
    mySettings.PREFER_LONGER_NAMES = myCbPreferLongerNames.isSelected();
    // Prefixes/suffixes are trimmed so stray whitespace never ends up in generated names.
    mySettings.FIELD_NAME_PREFIX = myFieldPrefixField.getText().trim();
    mySettings.STATIC_FIELD_NAME_PREFIX = myStaticFieldPrefixField.getText().trim();
    mySettings.PARAMETER_NAME_PREFIX = myParameterPrefixField.getText().trim();
    mySettings.LOCAL_VARIABLE_NAME_PREFIX = myLocalVariablePrefixField.getText().trim();
    mySettings.FIELD_NAME_SUFFIX = myFieldSuffixField.getText().trim();
    mySettings.STATIC_FIELD_NAME_SUFFIX = myStaticFieldSuffixField.getText().trim();
    mySettings.PARAMETER_NAME_SUFFIX = myParameterSuffixField.getText().trim();
    mySettings.LOCAL_VARIABLE_NAME_SUFFIX = myLocalVariableSuffixField.getText().trim();
    mySettings.LINE_COMMENT_AT_FIRST_COLUMN = myCbLineCommentAtFirstColumn.isSelected();
    mySettings.BLOCK_COMMENT_AT_FIRST_COLUMN = myCbBlockCommentAtFirstColumn.isSelected();
    mySettings.GENERATE_FINAL_LOCALS = myCbGenerateFinalLocals.isSelected();
    mySettings.GENERATE_FINAL_PARAMETERS = myCbGenerateFinalParameters.isSelected();
    mySettings.USE_EXTERNAL_ANNOTATIONS = myCbUseExternalAnnotations.isSelected();
    mySettings.INSERT_OVERRIDE_ANNOTATION = myInsertOverrideAnnotationCheckBox.isSelected();
    myMembersOrderList.apply(mySettings);
    for (Project project : ProjectManager.getInstance().getOpenProjects()) {
      DaemonCodeAnalyzer.getInstance(project).settingsChanged();
    }
  }

  /** @return true when any UI control differs from the stored settings. */
  public boolean isModified() {
    boolean isModified = isModified(myCbPreferLongerNames, mySettings.PREFER_LONGER_NAMES);
    isModified |= isModified(myFieldPrefixField, mySettings.FIELD_NAME_PREFIX);
    isModified |= isModified(myStaticFieldPrefixField, mySettings.STATIC_FIELD_NAME_PREFIX);
    isModified |= isModified(myParameterPrefixField, mySettings.PARAMETER_NAME_PREFIX);
    isModified |= isModified(myLocalVariablePrefixField, mySettings.LOCAL_VARIABLE_NAME_PREFIX);
    isModified |= isModified(myFieldSuffixField, mySettings.FIELD_NAME_SUFFIX);
    isModified |= isModified(myStaticFieldSuffixField, mySettings.STATIC_FIELD_NAME_SUFFIX);
    isModified |= isModified(myParameterSuffixField, mySettings.PARAMETER_NAME_SUFFIX);
    isModified |= isModified(myLocalVariableSuffixField, mySettings.LOCAL_VARIABLE_NAME_SUFFIX);
    isModified |= isModified(myCbLineCommentAtFirstColumn, mySettings.LINE_COMMENT_AT_FIRST_COLUMN);
    isModified |= isModified(myCbBlockCommentAtFirstColumn, mySettings.BLOCK_COMMENT_AT_FIRST_COLUMN);
    isModified |= isModified(myCbGenerateFinalLocals, mySettings.GENERATE_FINAL_LOCALS);
    isModified |= isModified(myCbGenerateFinalParameters, mySettings.GENERATE_FINAL_PARAMETERS);
    isModified |= isModified(myCbUseExternalAnnotations, mySettings.USE_EXTERNAL_ANNOTATIONS);
    isModified |= isModified(myInsertOverrideAnnotationCheckBox, mySettings.INSERT_OVERRIDE_ANNOTATION);
    isModified |= myMembersOrderList.isModified(mySettings);
    return isModified;
  }

  private static boolean isModified(JCheckBox checkBox, boolean value) {
    return checkBox.isSelected() != value;
  }

  private static boolean isModified(JTextField textField, String value) {
    return !textField.getText().trim().equals(value);
  }

  /**
   * List of member categories (fields/methods/constructors/inner classes)
   * whose visual order maps to the *_ORDER_WEIGHT settings: position i in the
   * list means weight i + 1.
   */
  private static class MembersOrderList extends JList {
    private static final String FIELDS = ApplicationBundle.message("listbox.members.order.fields");
    private static final String METHODS = ApplicationBundle.message("listbox.members.order.methods");
    private static final String CONSTRUCTORS = ApplicationBundle.message("listbox.members.order.constructors");
    private static final String INNER_CLASSES = ApplicationBundle.message("listbox.members.order.inner.classes");

    private final DefaultListModel myModel;

    public MembersOrderList() {
      myModel = new DefaultListModel();
      setModel(myModel);
      setVisibleRowCount(4);
    }

    /** Repopulates the list in the order implied by the settings' weights. */
    public void reset(final CodeStyleSettings settings) {
      myModel.removeAllElements();
      String[] strings = getStrings(settings);
      for (String string : strings) {
        myModel.addElement(string);
      }
      setSelectedIndex(0);
    }

    /** @return the four category labels sorted by their configured order weight. */
    private static String[] getStrings(final CodeStyleSettings settings) {
      String[] strings = new String[]{FIELDS, METHODS, CONSTRUCTORS, INNER_CLASSES};
      Arrays.sort(strings, new Comparator<String>() {
        public int compare(String o1, String o2) {
          // Subtraction is safe here: weights are small positive ints (1..4).
          int weight1 = getWeight(o1);
          int weight2 = getWeight(o2);
          return weight1 - weight2;
        }

        private int getWeight(String o) {
          if (FIELDS.equals(o)) {
            return settings.FIELDS_ORDER_WEIGHT;
          }
          else if (METHODS.equals(o)) {
            return settings.METHODS_ORDER_WEIGHT;
          }
          else if (CONSTRUCTORS.equals(o)) {
            return settings.CONSTRUCTORS_ORDER_WEIGHT;
          }
          else if (INNER_CLASSES.equals(o)) {
            return settings.INNER_CLASSES_ORDER_WEIGHT;
          }
          else {
            throw new IllegalArgumentException("unexpected " + o);
          }
        }
      });
      return strings;
    }

    /** Persists the current visual order back into the settings' weights (1-based). */
    public void apply(CodeStyleSettings settings) {
      for (int i = 0; i < myModel.size(); i++) {
        Object o = myModel.getElementAt(i);
        int weight = i + 1;
        if (FIELDS.equals(o)) {
          settings.FIELDS_ORDER_WEIGHT = weight;
        }
        else if (METHODS.equals(o)) {
          settings.METHODS_ORDER_WEIGHT = weight;
        }
        else if (CONSTRUCTORS.equals(o)) {
          settings.CONSTRUCTORS_ORDER_WEIGHT = weight;
        }
        else if (INNER_CLASSES.equals(o)) {
          settings.INNER_CLASSES_ORDER_WEIGHT = weight;
        }
        else {
          throw new IllegalArgumentException("unexpected " + o);
        }
      }
    }

    /** @return true when the on-screen order differs from the settings-derived order. */
    public boolean isModified(CodeStyleSettings settings) {
      String[] oldStrings = getStrings(settings);
      String[] newStrings = new String[myModel.size()];
      for (int i = 0; i < newStrings.length; i++) {
        newStrings[i] = (String)myModel.getElementAt(i);
      }
      return !Arrays.equals(newStrings, oldStrings);
    }
  }
}
apache-2.0
gdefias/JavaDemo
InitJava/base/src/main/java/I18n/zone.java
3700
package I18n;

// NOTE(review): the lowercase annotation name `zone` violates Java naming
// conventions (UpperCamelCase), but renaming it would break existing references.
public @interface zone {}

/**
 * Created by Defias on 2020/07.
 * Description: Internationalization (i18n)
 *
 * [Background]
 * Modern applications are rarely built for users in a single country, and
 * users in different countries speak and write different languages. This is
 * where the terms internationalization, multi-language and locale come from;
 * they all mean the same thing: supporting multiple languages so that users
 * from different countries can use the application.
 *
 * [Standards]
 * ISO-639 defines codes for common languages; each language is identified by
 * two lowercase letters.
 * ISO-3166 defines codes for countries/regions; each country/region is
 * identified by two uppercase letters.
 * Why identify a locale by language + country/region?
 * The reason is simple: take China — within the same Chinese language there
 * are countless regional dialects, never mind how many languages (and
 * dialects of them) exist worldwide.
 *
 * Examples of country/region locale codes:
 * ----------------------------------------------------
 * Country/region                      Locale code
 * Simplified Chinese (China)          zh-cn
 * Traditional Chinese (Taiwan)        zh-tw
 * Traditional Chinese (Hong Kong)     zh-hk
 * English (Hong Kong)                 en-hk
 * English (United States)             en-us
 * English (United Kingdom)            en-gb
 * English (global)                    en-ww
 * Korean (South Korea)                ko-kr
 * Japanese (Japan)                    ja-jp
 *
 * [How Java implements internationalization]
 * In the end, internationalization is just defining string templates per
 * language. In Java, multi-language string templates are normally kept in
 * properties resource files, which must follow this naming convention:
 *   <bundle name>_<language code>_<country/region code>.properties
 * The language code and country/region code are both optional.
 *   <bundle name>.properties
 * is the default resource file: if no resource file matching a given locale
 * can be found, this default file is used.
 *
 * Example: define Chinese and English resource files under the
 * com.notes.locale.resources path:
 * content_en_US.properties:
 *   helloWorld = HelloWorld!
 *   time = Thecurrenttimeis%s.
 * content_zh_CN.properties:
 *   helloWorld = \u4e16\u754c\uff0c\u4f60\u597d\uff01
 *   time = \u5f53\u524d\u65f6\u95f4\u662f\u0025\u0073\u3002
 * The keys are exactly the same in both languages; only the values hold the
 * language-specific strings.
 * Although the localized variants of the same resource file have different
 * property values, the property names are identical, so the application can
 * resolve a concrete property value from a Locale object plus a property name.
 * To display correctly across encodings, non-ASCII characters should be
 * converted to Unicode escapes — the Chinese resource file above shows the
 * result of converting Chinese text to Unicode.
 *
 * [Unicode conversion tool]
 * The JDK ships a Unicode conversion tool in its bin directory: native2ascii.
 * It converts resource files containing (for example) Chinese characters into
 * files that use Unicode escape sequences. Command format:
 *   native2ascii [-reverse] [-encoding encoding] [input file [output file]]
 * Example: assuming content_zh_CN.properties is in d:\, the following command
 * creates content_zh_CN_new.properties in which every Chinese character has
 * been converted to its UTF-8-based Unicode escape:
 *   native2ascii -encoding utf-8 d:\content_zh_CN.properties d:\content_zh_CN_new.properties
 *
 * [Loading resource files]
 * Once the multi-language resource files are defined, the next step is to
 * load them. Java provides a utility class for loading localized resource
 * files: java.util.ResourceBundle.
 *
 * [Internationalization-aware formatting classes]
 * Java also provides several formatting utility classes that support
 * internationalization, for example: NumberFormat, DateFormat, MessageFormat.
 */
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-chime/src/main/java/com/amazonaws/services/chime/model/UpdatePhoneNumberRequestItem.java
6277
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.chime.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * The phone number ID and product type fields to update, used with the <a>BatchUpdatePhoneNumber</a> and
 * <a>UpdatePhoneNumber</a> actions.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/chime-2018-05-01/UpdatePhoneNumberRequestItem" target="_top">AWS
 *      API Documentation</a>
 */
// NOTE(review): this class is produced by the AWS SDK code generator (see @Generated);
// do not hand-edit — changes will be overwritten on the next codegen run.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdatePhoneNumberRequestItem implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The phone number ID to update.
     * </p>
     */
    private String phoneNumberId;
    /**
     * <p>
     * The product type to update.
     * </p>
     */
    private String productType;

    /**
     * <p>
     * The phone number ID to update.
     * </p>
     *
     * @param phoneNumberId
     *        The phone number ID to update.
     */
    public void setPhoneNumberId(String phoneNumberId) {
        this.phoneNumberId = phoneNumberId;
    }

    /**
     * <p>
     * The phone number ID to update.
     * </p>
     *
     * @return The phone number ID to update.
     */
    public String getPhoneNumberId() {
        return this.phoneNumberId;
    }

    /**
     * <p>
     * The phone number ID to update.
     * </p>
     *
     * @param phoneNumberId
     *        The phone number ID to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UpdatePhoneNumberRequestItem withPhoneNumberId(String phoneNumberId) {
        setPhoneNumberId(phoneNumberId);
        return this;
    }

    /**
     * <p>
     * The product type to update.
     * </p>
     *
     * @param productType
     *        The product type to update.
     * @see PhoneNumberProductType
     */
    public void setProductType(String productType) {
        this.productType = productType;
    }

    /**
     * <p>
     * The product type to update.
     * </p>
     *
     * @return The product type to update.
     * @see PhoneNumberProductType
     */
    public String getProductType() {
        return this.productType;
    }

    /**
     * <p>
     * The product type to update.
     * </p>
     *
     * @param productType
     *        The product type to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see PhoneNumberProductType
     */
    public UpdatePhoneNumberRequestItem withProductType(String productType) {
        setProductType(productType);
        return this;
    }

    /**
     * <p>
     * The product type to update.
     * </p>
     *
     * @param productType
     *        The product type to update.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see PhoneNumberProductType
     */
    public UpdatePhoneNumberRequestItem withProductType(PhoneNumberProductType productType) {
        // Stores the enum's string form so the field matches the String-based setter's wire format.
        this.productType = productType.toString();
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getPhoneNumberId() != null)
            sb.append("PhoneNumberId: ").append(getPhoneNumberId()).append(",");
        if (getProductType() != null)
            sb.append("ProductType: ").append(getProductType());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof UpdatePhoneNumberRequestItem == false)
            return false;
        UpdatePhoneNumberRequestItem other = (UpdatePhoneNumberRequestItem) obj;
        // Generated null-safe comparison: the XOR rejects "exactly one side null";
        // the follow-up equals() runs only when both sides are non-null.
        if (other.getPhoneNumberId() == null ^ this.getPhoneNumberId() == null)
            return false;
        if (other.getPhoneNumberId() != null && other.getPhoneNumberId().equals(this.getPhoneNumberId()) == false)
            return false;
        if (other.getProductType() == null ^ this.getProductType() == null)
            return false;
        if (other.getProductType() != null && other.getProductType().equals(this.getProductType()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getPhoneNumberId() == null) ? 0 : getPhoneNumberId().hashCode());
        hashCode = prime * hashCode + ((getProductType() == null) ? 0 : getProductType().hashCode());
        return hashCode;
    }

    @Override
    public UpdatePhoneNumberRequestItem clone() {
        try {
            return (UpdatePhoneNumberRequestItem) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable in practice: the class implements Cloneable, so super.clone() cannot refuse.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    // Internal SDK hook: delegates wire-format marshalling to the generated marshaller.
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.chime.model.transform.UpdatePhoneNumberRequestItemMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
apache-2.0
fengshao0907/hazelcast-simulator
simulator/src/main/java/com/hazelcast/simulator/protocol/processors/CoordinatorOperationProcessor.java
1537
package com.hazelcast.simulator.protocol.processors; import com.hazelcast.simulator.protocol.core.ResponseType; import com.hazelcast.simulator.protocol.exception.LocalExceptionLogger; import com.hazelcast.simulator.protocol.operation.ExceptionOperation; import com.hazelcast.simulator.protocol.operation.OperationType; import com.hazelcast.simulator.protocol.operation.SimulatorOperation; import static com.hazelcast.simulator.protocol.core.ResponseType.SUCCESS; import static com.hazelcast.simulator.protocol.core.ResponseType.UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR; /** * An {@link OperationProcessor} implementation to process {@link SimulatorOperation} instances on a Simulator Coordinator. */ public class CoordinatorOperationProcessor extends OperationProcessor { private final LocalExceptionLogger exceptionLogger; public CoordinatorOperationProcessor(LocalExceptionLogger exceptionLogger) { super(exceptionLogger); this.exceptionLogger = exceptionLogger; } @Override protected ResponseType processOperation(OperationType operationType, SimulatorOperation operation) throws Exception { switch (operationType) { case EXCEPTION: processException((ExceptionOperation) operation); break; default: return UNSUPPORTED_OPERATION_ON_THIS_PROCESSOR; } return SUCCESS; } private void processException(ExceptionOperation operation) { exceptionLogger.logOperation(operation); } }
apache-2.0
variac/bazel
src/main/java/com/google/devtools/build/lib/skyframe/RegisteredToolchainsFunction.java
6249
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;

import com.google.common.collect.ImmutableList;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.PlatformConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.platform.DeclaredToolchainInfo;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.rules.ExternalPackageUtil;
import com.google.devtools.build.lib.rules.ExternalPackageUtil.ExternalPackageException;
import com.google.devtools.build.lib.skyframe.ConfiguredTargetFunction.ConfiguredValueCreationException;
import com.google.devtools.build.skyframe.LegacySkyKey;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyFunctionException.Transience;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;

/**
 * {@link SkyFunction} that returns all registered toolchains available for toolchain resolution.
 *
 * <p>Toolchain labels are collected from two sources: the build configuration (extra toolchains
 * passed on the command line) and the WORKSPACE file's registered toolchains. Each label is then
 * resolved to a configured target, which must carry a {@link DeclaredToolchainInfo} provider.
 *
 * <p>NOTE(review): following the usual skyframe protocol, {@code compute} (and the private helper
 * below) return {@code null} when dependencies are missing; the caller re-invokes the function
 * once those dependencies are available.
 */
public class RegisteredToolchainsFunction implements SkyFunction {

  @Nullable
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env)
      throws SkyFunctionException, InterruptedException {
    // The sky key's argument is the configuration to resolve toolchains for.
    BuildConfiguration configuration = (BuildConfiguration) skyKey.argument();
    ImmutableList.Builder<Label> registeredToolchainLabels = new ImmutableList.Builder<>();

    // Get the toolchains from the configuration.
    PlatformConfiguration platformConfiguration =
        configuration.getFragment(PlatformConfiguration.class);
    registeredToolchainLabels.addAll(platformConfiguration.getExtraToolchains());

    // Get the registered toolchains from the WORKSPACE.
    registeredToolchainLabels.addAll(getWorkspaceToolchains(env));
    if (env.valuesMissing()) {
      // Skyframe restart: dependencies not yet computed.
      return null;
    }

    // Load the configured target for each, and get the declared toolchain providers.
    ImmutableList<DeclaredToolchainInfo> registeredToolchains =
        configureRegisteredToolchains(env, configuration, registeredToolchainLabels.build());
    if (env.valuesMissing()) {
      return null;
    }

    return RegisteredToolchainsValue.create(registeredToolchains);
  }

  /**
   * Reads the toolchain labels registered in the WORKSPACE file. Returns an empty list (rather
   * than null) when the labels are not yet available; callers detect the missing-dependency case
   * via {@code env.valuesMissing()}.
   */
  private Iterable<? extends Label> getWorkspaceToolchains(Environment env)
      throws ExternalPackageException, InterruptedException {
    List<Label> labels = ExternalPackageUtil.getRegisteredToolchainLabels(env);
    if (labels == null) {
      return ImmutableList.of();
    }
    return labels;
  }

  /**
   * Resolves each toolchain label to a configured target and extracts its
   * {@link DeclaredToolchainInfo} provider, preserving the input order.
   *
   * @throws RegisteredToolchainsFunctionException if a target does not provide a toolchain, or if
   *     configuring a target fails
   */
  private ImmutableList<DeclaredToolchainInfo> configureRegisteredToolchains(
      Environment env, BuildConfiguration configuration, List<Label> labels)
      throws InterruptedException, RegisteredToolchainsFunctionException {
    ImmutableList<SkyKey> keys =
        labels
            .stream()
            .map(
                label ->
                    LegacySkyKey.create(
                        SkyFunctions.CONFIGURED_TARGET,
                        new ConfiguredTargetKey(label, configuration)))
            .collect(ImmutableList.toImmutableList());

    Map<SkyKey, ValueOrException<ConfiguredValueCreationException>> values =
        env.getValuesOrThrow(keys, ConfiguredValueCreationException.class);
    if (env.valuesMissing()) {
      return null;
    }
    ImmutableList.Builder<DeclaredToolchainInfo> toolchains = new ImmutableList.Builder<>();
    // Iterate over the keys (not the map) so the result order matches the label order.
    for (SkyKey key : keys) {
      ConfiguredTargetKey configuredTargetKey = (ConfiguredTargetKey) key.argument();
      Label toolchainLabel = configuredTargetKey.getLabel();
      try {
        ConfiguredTarget target =
            ((ConfiguredTargetValue) values.get(key).get()).getConfiguredTarget();
        DeclaredToolchainInfo toolchainInfo = target.getProvider(DeclaredToolchainInfo.class);
        if (toolchainInfo == null) {
          // The label named something that is not a toolchain() target.
          throw new RegisteredToolchainsFunctionException(
              new InvalidTargetException(toolchainLabel), Transience.PERSISTENT);
        }
        toolchains.add(toolchainInfo);
      } catch (ConfiguredValueCreationException e) {
        throw new RegisteredToolchainsFunctionException(e, Transience.PERSISTENT);
      }
    }
    return toolchains.build();
  }

  @Nullable
  @Override
  public String extractTag(SkyKey skyKey) {
    return null;
  }

  /**
   * Used to indicate that the given {@link Label} represents a {@link ConfiguredTarget} which is
   * not a valid {@link DeclaredToolchainInfo} provider.
   */
  public static final class InvalidTargetException extends Exception {

    private final Label invalidLabel;

    public InvalidTargetException(Label invalidLabel) {
      super(String.format("target '%s' does not provide a toolchain", invalidLabel));
      this.invalidLabel = invalidLabel;
    }

    /** Returns the label of the offending target. */
    public Label getInvalidLabel() {
      return invalidLabel;
    }
  }

  /**
   * Used to declare all the exception types that can be wrapped in the exception thrown by {@link
   * #compute}.
   */
  public static class RegisteredToolchainsFunctionException extends SkyFunctionException {

    public RegisteredToolchainsFunctionException(
        InvalidTargetException cause, Transience transience) {
      super(cause, transience);
    }

    public RegisteredToolchainsFunctionException(
        ConfiguredValueCreationException cause, Transience persistent) {
      super(cause, persistent);
    }
  }
}
apache-2.0
mcwarman/interlok
adapter/src/main/java/com/adaptris/core/services/jmx/JmxWaitService.java
4647
/* * Copyright 2015 Adaptris Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.adaptris.core.services.jmx; import java.util.concurrent.TimeUnit; import javax.management.MBeanServerConnection; import javax.management.ObjectName; import javax.validation.Valid; import com.adaptris.annotation.AdapterComponent; import com.adaptris.annotation.AdvancedConfig; import com.adaptris.annotation.ComponentProfile; import com.adaptris.annotation.DisplayOrder; import com.adaptris.annotation.InputFieldDefault; import com.adaptris.core.AdaptrisMessage; import com.adaptris.core.ServiceException; import com.adaptris.core.interceptor.InFlightWorkflowInterceptor; import com.adaptris.core.jmx.JmxConnection; import com.adaptris.core.util.ExceptionHelper; import com.adaptris.util.TimeInterval; import com.thoughtworks.xstream.annotations.XStreamAlias; /** * <p> * Allows you to make a remote call on a JMX operation and wait until the result from the JMX operation is "true" * </p> * <p> * You can set parameters for the call using {@link ValueTranslator} implements. The result of the JMX operation is always * expected to be a {@link Boolean}. When used in conjunction with {@link InFlightWorkflowInterceptor} you can use this service * to pause processing until the workflow in question does not have any in flight messages. 
* </p> * * @config jmx-wait-service * @since 3.3.0 */ @XStreamAlias("jmx-wait-service") @AdapterComponent @ComponentProfile(summary = "Execute a JMX operation", tag = "service,jmx", recommended = {JmxConnection.class}) @DisplayOrder(order = {"objectName", "operationName", "operationParameters", "negate"}) public class JmxWaitService extends JmxOperationServiceImpl { private static final TimeInterval DEFAULT_INTERVAL = new TimeInterval(10L, TimeUnit.SECONDS); @InputFieldDefault(value = "false") private Boolean negate; @Valid @AdvancedConfig private TimeInterval retryInterval; private transient JmxOperationInvoker invoker; public JmxWaitService() { super(); setInvoker(new JmxOperationInvoker<Boolean>()); } @Override public void doService(AdaptrisMessage message) throws ServiceException { try { MBeanServerConnection mbeanConn = getConnection().retrieveConnection(JmxConnection.class).mbeanServerConnection(); Object[] params = parametersToArray(message); String[] paramTypes = parametersToTypeArray(message); ObjectName objectNameInst = ObjectName.getInstance(getObjectName()); boolean conditionReached = evaluate(getInvoker().invoke(mbeanConn, objectNameInst, getOperationName(), params, paramTypes)); while (!conditionReached) { Thread.sleep(retryInterval()); conditionReached = evaluate(getInvoker().invoke(mbeanConn, objectNameInst, getOperationName(), params, paramTypes)); } } catch (Exception e) { throw ExceptionHelper.wrapServiceException(e); } } private boolean evaluate(Boolean result) { if (negate()) { return !result; } return result; } /** * @return the invoker */ private JmxOperationInvoker<Boolean> getInvoker() { return invoker; } /** * @param invoker the invoker to set */ void setInvoker(JmxOperationInvoker<Boolean> invoker) { this.invoker = invoker; } /** * @return the retryInterval */ public TimeInterval getRetryInterval() { return retryInterval; } /** * @param retryInterval the retryInterval to set */ public void setRetryInterval(TimeInterval retryInterval) { 
this.retryInterval = retryInterval; } long retryInterval() { return getRetryInterval() != null ? getRetryInterval().toMilliseconds() : DEFAULT_INTERVAL.toMilliseconds(); } /** * @return the negate */ public Boolean getNegate() { return negate; } /** * Switches the success criteria to {@code !operation} if set to true. * * @param n the negate to set, defaults to false. */ public void setNegate(Boolean n) { this.negate = n; } boolean negate() { return getNegate() != null ? getNegate().booleanValue() : false; } }
apache-2.0
gdefias/StudyJava
InitJava/base/src/main/java/FileIO/NotIOStream/TestRandomAccessFile2.java
1734
package FileIO.NotIOStream; /** * Created by Defias on 2017/2/24. * * 随机访问文件实例 * */ import java.io.*; public class TestRandomAccessFile2 { public static void main(String[] args) throws IOException { // Create a random access file RandomAccessFile inout = new RandomAccessFile("inout.dat", "rw"); // Clear the file to destroy the old contents if exists inout.setLength(0); // Write new integers to the file for (int i = 0; i < 200; i++) inout.writeInt(i); // Display the current length of the file System.out.println("Current file length is " + inout.length()); // Retrieve the first number inout.seek(0); // Move the file pointer to the beginning System.out.println("The first number is " + inout.readInt()); // Retrieve the second number inout.seek(1 * 4); // Move the file pointer to the second number System.out.println("The second number is " + inout.readInt()); // Retrieve the tenth number inout.seek(9 * 4); // Move the file pointer to the tenth number System.out.println("The tenth number is " + inout.readInt()); // Modify the eleventh number inout.writeInt(555); // Append a new number inout.seek(inout.length()); // Move the file pointer to the end inout.writeInt(999); // Display the new length System.out.println("The new length is " + inout.length()); // Retrieve the new eleventh number inout.seek(10 * 4); // Move the file pointer to the eleventh number System.out.println("The eleventh number is " + inout.readInt()); inout.close(); } }
apache-2.0
Ztiany/AndroidBase
lib_base/src/main/java/com/android/base/adapter/pager/ViewPageFragmentAdapter.java
1248
package com.android.base.adapter.pager; import android.content.Context; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import java.util.ArrayList; import java.util.List; @SuppressWarnings("unused") public class ViewPageFragmentAdapter extends FragmentPagerAdapter { private final List<ViewPageInfo> mTabs; private Context mContext; public ViewPageFragmentAdapter(FragmentManager fragmentManager, Context context) { super(fragmentManager); mContext = context; mTabs = new ArrayList<>(); } public void setDataSource(List<ViewPageInfo> viewPageInfoList) { mTabs.clear(); mTabs.addAll(viewPageInfoList); } @Override public int getCount() { return mTabs.size(); } @Override public Fragment getItem(int position) { ViewPageInfo viewPageInfo = mTabs.get(position); return Fragment.instantiate(mContext, viewPageInfo.clazz.getName(), viewPageInfo.args); } @Override public CharSequence getPageTitle(int position) { return mTabs.get(position).title; } public List<ViewPageInfo> getTabs() { return mTabs; } }
apache-2.0
egzosn/pay-java-parent
pay-java-common/src/main/java/com/egzosn/pay/common/bean/TransferType.java
445
package com.egzosn.pay.common.bean;

import java.util.Map;

/**
 * Transfer transaction type. (Translated from the original Chinese javadoc: 转账类型.)
 *
 * @author egan
 * email egzosn@gmail.com
 * date 2018/9/28.19:45
 */
public interface TransferType extends TransactionType{

    /**
     * Populates the request attributes for a transfer.
     *
     * @param attr  the existing attribute map to add to
     * @param order the transfer order
     * @return the attribute map
     */
    Map<String, Object> setAttr(Map<String, Object> attr, TransferOrder order);
}
apache-2.0
ZhongXinWang/wzxWeather
app/src/main/java/weather/test/wzx/com/wzxweather/bases/BaseActivity.java
570
package weather.test.wzx.com.wzxweather.bases;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

import weather.test.wzx.com.wzxweather.util.ActivityQuenu;

/**
 * Base class for the app's activities. Registers each activity with
 * {@link ActivityQuenu} on creation and removes it on destruction —
 * presumably so all activities can be finished centrally; confirm
 * against ActivityQuenu's implementation.
 */
public abstract class BaseActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Track this activity in the app-wide queue.
        ActivityQuenu.addActivity(this);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Remove this activity from the queue. (Translated from: 移除当前的程序)
        ActivityQuenu.removeActivity(this);
    }
}
apache-2.0
CMPUT301W16T08/scaling-pancake
app/src/main/java/cmput301w16t08/scaling_pancake/activities/MenuActivity.java
3566
package cmput301w16t08.scaling_pancake.activities; import android.content.Intent; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import android.widget.Toast; import org.apache.commons.lang3.ObjectUtils; import cmput301w16t08.scaling_pancake.controllers.Controller; import cmput301w16t08.scaling_pancake.R; import cmput301w16t08.scaling_pancake.models.Bid; import cmput301w16t08.scaling_pancake.models.BidList; import cmput301w16t08.scaling_pancake.models.InstrumentList; import cmput301w16t08.scaling_pancake.models.User; /** * The <code>MenuActivity</code> provides a set of options for the user to click on. * Each option navigates to a corresponding <code>Activity</code>. * This is the main "dashboard" of the application. * * @author cmput301w16t08 * @see ViewProfileActivity * @see SearchInstrumentsActivity * @see InstrumentListActivity * @see AddInstrumentActivity * @see Controller * */ public class MenuActivity extends AppCompatActivity { // set up our global controller private static Controller controller; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_menu); controller = (Controller) getApplicationContext(); } protected void onResume() { super.onResume(); View notif_button = findViewById(R.id.new_bid_notif_button); User current_user = controller.getCurrentUser(); try { if (!current_user.getNewBidFlag()) { notif_button.setVisibility(View.INVISIBLE); } } catch (NullPointerException e) { Toast.makeText(controller, "Warning! 
You are not logged in!", Toast.LENGTH_SHORT).show(); notif_button.setVisibility(View.INVISIBLE); } } /** * Navigate to the <code>ViewProfileActivity</code> * @param view */ public void viewProfile(View view){ Intent intent = new Intent(this, ViewProfileActivity.class); intent.putExtra("user_id", controller.getCurrentUser().getId()); startActivity(intent); } /** * Navigate to the <code>AddInstrumentActivity</code> * @param view */ public void addInstruments(View view){ Intent intent = new Intent(this, AddInstrumentActivity.class); startActivity(intent); } /** * Navigate to the <code>InstrumentListActivity</code> * @param view */ public void viewInstruments(View view) { Intent intent = new Intent(this, InstrumentListActivity.class); startActivity(intent); } /** * Navigate to the <code>SearchInstrumentsActivity</code> * @param view */ public void searchInstruments(View view){ Intent intent = new Intent(this, SearchInstrumentsActivity.class); startActivity(intent); } /** * Navigate to the <code>InstrumentListActivity</code>, where the user * can view the instruments which have new bids. * @param view */ public void viewNotification(View view){ Intent intent = new Intent(this, InstrumentListActivity.class); controller.resetNewBidFlag(); startActivity(intent); } /** * Log the user out and return to the login screen * @param view */ public void logout(View view){ controller.logout(); Intent intent = new Intent(this, MainActivity.class); finish(); startActivity(intent); } }
apache-2.0
LtLei/RxHttp
RxHttpLib/rxhttp/src/main/java/com/lei/lib/java/rxhttp/progress/ProgressListener.java
154
package com.lei.lib.java.rxhttp.progress;

/**
 * Callback for observing the progress of an HTTP transfer.
 */
public interface ProgressListener {
    /**
     * Invoked as bytes are transferred.
     *
     * @param currentBytes  number of bytes transferred so far
     * @param contentLength total number of bytes expected
     *                      (semantics when the length is unknown are not
     *                      defined here — confirm against the caller)
     * @param done          true once the transfer has completed
     */
    void onProgress(long currentBytes, long contentLength, boolean done);
}
apache-2.0
wjsl/jaredcumulo
core/src/main/java/org/apache/accumulo/core/iterators/system/ColumnQualifierFilter.java
3790
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.accumulo.core.iterators.system; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import org.apache.accumulo.core.data.ArrayByteSequence; import org.apache.accumulo.core.data.ByteSequence; import org.apache.accumulo.core.data.Column; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.Filter; import org.apache.accumulo.core.iterators.IteratorEnvironment; import org.apache.accumulo.core.iterators.SortedKeyValueIterator; public class ColumnQualifierFilter extends Filter { private boolean scanColumns; private HashSet<ByteSequence> columnFamilies; private HashMap<ByteSequence,HashSet<ByteSequence>> columnsQualifiers; public ColumnQualifierFilter() {} public ColumnQualifierFilter(SortedKeyValueIterator<Key,Value> iterator, HashSet<Column> columns) { setSource(iterator); init(columns); } public ColumnQualifierFilter(SortedKeyValueIterator<Key,Value> iterator, HashSet<ByteSequence> columnFamilies, HashMap<ByteSequence,HashSet<ByteSequence>> columnsQualifiers, boolean scanColumns) { setSource(iterator); this.columnFamilies = columnFamilies; this.columnsQualifiers = columnsQualifiers; 
this.scanColumns = scanColumns; } public boolean accept(Key key, Value v) { if (!scanColumns) return true; if (columnFamilies.contains(key.getColumnFamilyData())) return true; HashSet<ByteSequence> cfset = columnsQualifiers.get(key.getColumnQualifierData()); // ensure the columm qualifier goes with a paired column family, // it is possible that a column qualifier could occur with a // column family it was not paired with return cfset != null && cfset.contains(key.getColumnFamilyData()); } public void init(HashSet<Column> columns) { this.columnFamilies = new HashSet<ByteSequence>(); this.columnsQualifiers = new HashMap<ByteSequence,HashSet<ByteSequence>>(); for (Iterator<Column> iter = columns.iterator(); iter.hasNext();) { Column col = iter.next(); if (col.columnQualifier != null) { ArrayByteSequence cq = new ArrayByteSequence(col.columnQualifier); HashSet<ByteSequence> cfset = this.columnsQualifiers.get(cq); if (cfset == null) { cfset = new HashSet<ByteSequence>(); this.columnsQualifiers.put(cq, cfset); } cfset.add(new ArrayByteSequence(col.columnFamily)); } else { // this whole column family should pass columnFamilies.add(new ArrayByteSequence(col.columnFamily)); } } // only take action when column qualifies are present scanColumns = this.columnsQualifiers.size() > 0; } @Override public SortedKeyValueIterator<Key,Value> deepCopy(IteratorEnvironment env) { return new ColumnQualifierFilter(getSource().deepCopy(env), columnFamilies, columnsQualifiers, scanColumns); } }
apache-2.0
Taller/sqlworkbench-plus
src/workbench/gui/settings/WindowTitleOptionsPanel.java
13037
/*
 * WindowTitleOptionsPanel.java
 *
 * This file is part of SQL Workbench/J, http://www.sql-workbench.net
 *
 * Copyright 2002-2015, Thomas Kellerer
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at.
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * To contact the author please send an email to: support@sql-workbench.net
 *
 */
package workbench.gui.settings;

import workbench.gui.components.WbTraversalPolicy;
import workbench.resource.GuiSettings;
import workbench.resource.ResourceMgr;

/**
 * Settings panel for the application's window-title options. Loads its state
 * from {@link GuiSettings} in {@link #restoreSettings()} and persists it in
 * {@link #saveSettings()}. The form layout in initComponents() is generated
 * by the NetBeans Form Editor and must not be edited by hand.
 *
 * @author Thomas Kellerer
 */
public class WindowTitleOptionsPanel
  extends javax.swing.JPanel
  implements workbench.interfaces.Restoreable
{
  public WindowTitleOptionsPanel()
  {
    super();
    initComponents();
    // It is important to add these in the correct order
    // which is defined by the numeric values from Settings.SHOW_NO_FILENAME
    // SHOW_FILENAME and SHOW_FULL_PATH
    this.windowTitleComboBox.addItem(ResourceMgr.getString("TxtShowNone"));
    this.windowTitleComboBox.addItem(ResourceMgr.getString("TxtShowName"));
    this.windowTitleComboBox.addItem(ResourceMgr.getString("TxtShowPath"));
    // Tab-traversal order for the checkboxes and the combobox.
    WbTraversalPolicy policy = new WbTraversalPolicy();
    policy.addComponent(productAtEnd);
    policy.addComponent(showProfileGroup);
    policy.addComponent(showWorkspace);
    policy.addComponent(windowTitleComboBox);
    policy.setDefaultComponent(productAtEnd);
    // Prepend the "nothing" choice so index 0 means "no bracket".
    this.encloseChar.insertItemAt(ResourceMgr.getString("TxtNothingItem"), 0);
    this.setFocusTraversalPolicy(policy);
    this.setFocusCycleRoot(false);
    this.restoreSettings();
  }

  /** Loads the current values from {@link GuiSettings} into the form controls. */
  @Override
  public final void restoreSettings()
  {
    int type = GuiSettings.getShowFilenameInWindowTitle();
    // Combobox items were added in the order of the SHOW_* constants, so the
    // stored value can be used directly as the selected index.
    if (type >= GuiSettings.SHOW_NO_FILENAME && type <= GuiSettings.SHOW_FULL_PATH)
    {
      this.windowTitleComboBox.setSelectedIndex(type);
    }
    this.showProfileGroup.setSelected(GuiSettings.getShowProfileGroupInWindowTitle());
    this.showWorkspace.setSelected(GuiSettings.getShowWorkspaceInWindowTitle());
    this.productAtEnd.setSelected(GuiSettings.getShowProductNameAtEnd());
    this.showUrl.setSelected(GuiSettings.getShowURLinWindowTitle());
    this.includeUser.setSelected(GuiSettings.getIncludeUserInTitleURL());
    this.includeUser.setEnabled(showUrl.isSelected());
    String enclose = GuiSettings.getTitleGroupBracket();
    if (enclose == null)
    {
      // No bracket configured: select the "nothing" item at index 0.
      encloseChar.setSelectedIndex(0);
    }
    else
    {
      // Find the bracket item whose text starts with the stored character.
      int count = encloseChar.getItemCount();
      for (int i = 1; i < count; i++)
      {
        String item = (String) encloseChar.getItemAt(i);
        if (item.startsWith(enclose.trim()))
        {
          encloseChar.setSelectedIndex(i);
          break;
        }
      }
    }
    checkShowProfile();
    this.titleGroupSep.setText(GuiSettings.getTitleGroupSeparator());
  }

  /** Persists the form controls' values back to {@link GuiSettings}. */
  @Override
  public void saveSettings()
  {
    GuiSettings.setShowFilenameInWindowTitle(this.windowTitleComboBox.getSelectedIndex());
    GuiSettings.setShowProfileGroupInWindowTitle(showProfileGroup.isSelected());
    GuiSettings.setShowWorkspaceInWindowTitle(showWorkspace.isSelected());
    GuiSettings.setShowProductNameAtEnd(productAtEnd.isSelected());
    GuiSettings.setTitleGroupSeparator(titleGroupSep.getText());
    GuiSettings.setShowURLinWindowTitle(showUrl.isSelected());
    GuiSettings.setIncludeUserInTitleURL(includeUser.isSelected());
    int index = this.encloseChar.getSelectedIndex();
    if (index == 0)
    {
      // Index 0 is the "nothing" item: store no bracket.
      GuiSettings.setTitleGroupBracket(null);
    }
    else
    {
      // Only the opening character of the selected "( )"-style item is stored.
      String bracket = (String) this.encloseChar.getSelectedItem();
      GuiSettings.setTitleGroupBracket(bracket.substring(0, 1));
    }
  }

  /** Enables the bracket/separator controls only when the profile group is shown. */
  protected void checkShowProfile()
  {
    this.encloseChar.setEnabled(this.showProfileGroup.isSelected());
    this.titleGroupSep.setEnabled(this.showProfileGroup.isSelected());
  }

  /** This method is called from within the constructor to
   * initialize the form.
   * WARNING: Do NOT modify this code. The content of this method is
   * always regenerated by the Form Editor.
   */
  // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
  private void initComponents() {
    java.awt.GridBagConstraints gridBagConstraints;

    productAtEnd = new javax.swing.JCheckBox();
    showProfileGroup = new javax.swing.JCheckBox();
    showWorkspace = new javax.swing.JCheckBox();
    windowTitleLabel = new javax.swing.JLabel();
    windowTitleComboBox = new javax.swing.JComboBox();
    encloseCharLabel = new javax.swing.JLabel();
    encloseChar = new javax.swing.JComboBox();
    jPanel1 = new javax.swing.JPanel();
    titleGroupSepLabel = new javax.swing.JLabel();
    titleGroupSep = new javax.swing.JTextField();
    showUrl = new javax.swing.JCheckBox();
    includeUser = new javax.swing.JCheckBox();

    setLayout(new java.awt.GridBagLayout());

    productAtEnd.setText(ResourceMgr.getString("LblShowProductAtEnd")); // NOI18N
    productAtEnd.setToolTipText(ResourceMgr.getString("d_LblShowProductAtEnd")); // NOI18N
    productAtEnd.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 2;
    gridBagConstraints.gridwidth = 4;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(6, 12, 2, 11);
    add(productAtEnd, gridBagConstraints);

    showProfileGroup.setText(ResourceMgr.getString("LblShowProfileGroup")); // NOI18N
    showProfileGroup.setToolTipText(ResourceMgr.getString("d_LblShowProfileGroup")); // NOI18N
    showProfileGroup.setBorder(null);
    showProfileGroup.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
    showProfileGroup.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
    showProfileGroup.setIconTextGap(5);
    showProfileGroup.addChangeListener(new javax.swing.event.ChangeListener() {
      public void stateChanged(javax.swing.event.ChangeEvent evt) {
        showProfileGroupStateChanged(evt);
      }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 4;
    gridBagConstraints.gridwidth = 4;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(8, 12, 0, 11);
    add(showProfileGroup, gridBagConstraints);

    showWorkspace.setText(ResourceMgr.getString("LblShowWorkspace")); // NOI18N
    showWorkspace.setToolTipText(ResourceMgr.getString("d_LblShowWorkspace")); // NOI18N
    showWorkspace.setBorder(null);
    showWorkspace.setHorizontalAlignment(javax.swing.SwingConstants.LEFT);
    showWorkspace.setHorizontalTextPosition(javax.swing.SwingConstants.RIGHT);
    showWorkspace.setIconTextGap(5);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 3;
    gridBagConstraints.gridwidth = 4;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(6, 12, 0, 1);
    add(showWorkspace, gridBagConstraints);

    windowTitleLabel.setLabelFor(windowTitleComboBox);
    windowTitleLabel.setText(ResourceMgr.getString("LblShowEditorInfo")); // NOI18N
    windowTitleLabel.setToolTipText(ResourceMgr.getString("d_LblShowEditorInfo")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 6;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.insets = new java.awt.Insets(16, 12, 0, 0);
    add(windowTitleLabel, gridBagConstraints);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 6;
    gridBagConstraints.gridwidth = 3;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
    gridBagConstraints.insets = new java.awt.Insets(15, 4, 0, 11);
    add(windowTitleComboBox, gridBagConstraints);

    encloseCharLabel.setText(ResourceMgr.getString("LblEncloseGroupChar")); // NOI18N
    encloseCharLabel.setToolTipText(ResourceMgr.getString("d_LblEncloseGroupChar")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(10, 12, 2, 0);
    add(encloseCharLabel, gridBagConstraints);

    encloseChar.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "( )", "{ }", "[ ]", "< >" }));
    encloseChar.setToolTipText(ResourceMgr.getDescription("LblEncloseGroupChar"));
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 1;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(10, 4, 0, 11);
    add(encloseChar, gridBagConstraints);
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 3;
    gridBagConstraints.gridy = 6;
    gridBagConstraints.weightx = 1.0;
    gridBagConstraints.weighty = 1.0;
    add(jPanel1, gridBagConstraints);

    titleGroupSepLabel.setText(ResourceMgr.getString("LblGroupSeparator")); // NOI18N
    titleGroupSepLabel.setToolTipText(ResourceMgr.getString("d_LblGroupSeparator")); // NOI18N
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 2;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(10, 2, 2, 0);
    add(titleGroupSepLabel, gridBagConstraints);

    titleGroupSep.setColumns(5);
    titleGroupSep.setToolTipText(ResourceMgr.getDescription("LblGroupSeparator"));
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 3;
    gridBagConstraints.gridy = 5;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(10, 5, 2, 11);
    add(titleGroupSep, gridBagConstraints);

    showUrl.setText(ResourceMgr.getString("LblUrlInTitle")); // NOI18N
    showUrl.setToolTipText(ResourceMgr.getString("d_LblUrlInTitle")); // NOI18N
    showUrl.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    showUrl.addActionListener(new java.awt.event.ActionListener() {
      public void actionPerformed(java.awt.event.ActionEvent evt) {
        showUrlActionPerformed(evt);
      }
    });
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 0;
    gridBagConstraints.gridwidth = 4;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(10, 12, 2, 11);
    add(showUrl, gridBagConstraints);

    includeUser.setText(ResourceMgr.getString("LblUrlWithUser")); // NOI18N
    includeUser.setToolTipText(ResourceMgr.getString("d_LblUrlWithUser")); // NOI18N
    includeUser.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
    gridBagConstraints = new java.awt.GridBagConstraints();
    gridBagConstraints.gridx = 0;
    gridBagConstraints.gridy = 1;
    gridBagConstraints.gridwidth = 4;
    gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
    gridBagConstraints.insets = new java.awt.Insets(6, 30, 2, 11);
    add(includeUser, gridBagConstraints);
  }// </editor-fold>//GEN-END:initComponents

  private void showProfileGroupStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_showProfileGroupStateChanged
    checkShowProfile();
  }//GEN-LAST:event_showProfileGroupStateChanged

  private void showUrlActionPerformed(java.awt.event.ActionEvent evt)//GEN-FIRST:event_showUrlActionPerformed
  {//GEN-HEADEREND:event_showUrlActionPerformed
    includeUser.setEnabled(showUrl.isSelected());
  }//GEN-LAST:event_showUrlActionPerformed

  // Variables declaration - do not modify//GEN-BEGIN:variables
  private javax.swing.JComboBox encloseChar;
  private javax.swing.JLabel encloseCharLabel;
  private javax.swing.JCheckBox includeUser;
  private javax.swing.JPanel jPanel1;
  private javax.swing.JCheckBox productAtEnd;
  private javax.swing.JCheckBox showProfileGroup;
  private javax.swing.JCheckBox showUrl;
  private javax.swing.JCheckBox showWorkspace;
  private javax.swing.JTextField titleGroupSep;
  private javax.swing.JLabel titleGroupSepLabel;
  private javax.swing.JComboBox windowTitleComboBox;
  private javax.swing.JLabel windowTitleLabel;
  // End of variables declaration//GEN-END:variables
}
apache-2.0
ThorbenLindhauer/activiti-engine-ppi
modules/activiti-engine/src/test/java/org/activiti/examples/bpmn/executionlistener/ExecutionListenerTest.java
5876
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.activiti.examples.bpmn.executionlistener;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.activiti.engine.impl.test.PluggableActivitiTestCase;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.activiti.engine.test.Deployment;
import org.activiti.examples.bpmn.executionlistener.RecorderExecutionListener.RecordedEvent;

/**
 * Tests that execution listeners fire on every supported BPMN element:
 * process start/end, transitions, user tasks and intermediate events,
 * and that both field injection and method expressions work.
 *
 * @author Frederik Heremans
 */
public class ExecutionListenerTest extends PluggableActivitiTestCase {

  @Deployment(resources = {"org/activiti/examples/bpmn/executionlistener/ExecutionListenersProcess.bpmn20.xml"})
  public void testExecutionListenersOnAllPossibleElements() {
    // The process-start listener sets a variable and records the business key.
    ProcessInstance instance = runtimeService.startProcessInstanceByKey("executionListenersProcess", "businessKey123");

    assertVariableEquals(instance.getId(), "variableSetInExecutionListener", "firstValue");
    assertVariableEquals(instance.getId(), "businessKeyInExecution", "businessKey123");

    // Taking the first transition triggers a listener that overwrites the variable.
    completeSingleTask(instance.getId());
    assertVariableEquals(instance.getId(), "variableSetInExecutionListener", "secondValue");

    // First user task uses a method-expression listener: ${myPojo.myMethod(execution.eventName)}
    runtimeService.setVariable(instance.getId(), "myPojo", new ExampleExecutionListenerPojo());
    completeSingleTask(instance.getId());

    ExampleExecutionListenerPojo pojo =
        (ExampleExecutionListenerPojo) runtimeService.getVariable(instance.getId(), "myPojo");
    assertNotNull(pojo.getReceivedEventName());
    assertEquals("end", pojo.getReceivedEventName());

    completeSingleTask(instance.getId());
    assertProcessEnded(instance.getId());
  }

  @Deployment(resources = {"org/activiti/examples/bpmn/executionlistener/ExecutionListenersStartEndEvent.bpmn20.xml"})
  public void testExecutionListenersOnStartEndEvents() {
    RecorderExecutionListener.clear();

    ProcessInstance instance = runtimeService.startProcessInstanceByKey("executionListenersProcess");
    assertProcessEnded(instance.getId());

    // The recorder listener should have captured one event per configured element, in order.
    List<RecordedEvent> events = RecorderExecutionListener.getRecordedEvents();
    assertEquals(4, events.size());
    assertRecordedEvent(events.get(0), "theStart", "Start Event", "Start Event Listener", "end");
    assertRecordedEvent(events.get(1), "noneEvent", "None Event", "Intermediate Catch Event Listener", "end");
    assertRecordedEvent(events.get(2), "signalEvent", "Signal Event", "Intermediate Throw Event Listener", "start");
    assertRecordedEvent(events.get(3), "theEnd", "End Event", "End Event Listener", "start");
  }

  @Deployment(resources = {"org/activiti/examples/bpmn/executionlistener/ExecutionListenersFieldInjectionProcess.bpmn20.xml"})
  public void testExecutionListenerFieldInjection() {
    Map<String, Object> variables = new HashMap<String, Object>();
    variables.put("myVar", "listening!");

    ProcessInstance instance = runtimeService.startProcessInstanceByKey("executionListenersProcess", variables);

    Object valueSetByListener = runtimeService.getVariable(instance.getId(), "var");
    assertNotNull(valueSetByListener);
    assertTrue(valueSetByListener instanceof String);
    // Result concatenates the fixed injected field with the injected expression.
    assertEquals("Yes, I am listening!", valueSetByListener);
  }

  /** Completes the single open task of the given process instance, asserting it exists. */
  private void completeSingleTask(String processInstanceId) {
    Task task = taskService.createTaskQuery().processInstanceId(processInstanceId).singleResult();
    assertNotNull(task);
    taskService.complete(task.getId());
  }

  /** Asserts that the given String process variable is present and has the expected value. */
  private void assertVariableEquals(String processInstanceId, String variableName, String expectedValue) {
    String actual = (String) runtimeService.getVariable(processInstanceId, variableName);
    assertNotNull(actual);
    assertEquals(expectedValue, actual);
  }

  /** Asserts every recorded property of a single listener event. */
  private void assertRecordedEvent(RecordedEvent event, String activityId, String activityName,
      String parameter, String eventName) {
    assertEquals(activityId, event.getActivityId());
    assertEquals(activityName, event.getActivityName());
    assertEquals(parameter, event.getParameter());
    assertEquals(eventName, event.getEventName());
  }
}
apache-2.0
gocd-contrib/docker-swarm-elastic-agents
src/test/java/cd/go/contrib/elasticagents/dockerswarm/elasticagent/DockerSecretsTest.java
6154
/*
 * Copyright 2017 ThoughtWorks, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package cd.go.contrib.elasticagents.dockerswarm.elasticagent;

import com.spotify.docker.client.messages.swarm.Secret;
import com.spotify.docker.client.messages.swarm.SecretBind;
import com.spotify.docker.client.messages.swarm.SecretSpec;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import java.util.List;

import static java.util.Arrays.asList;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Tests parsing of the plugin's secret-specification strings and their
 * conversion into Docker swarm {@link SecretBind}s.
 */
public class DockerSecretsTest {

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    @Test
    public void shouldBuildDockerSecretFromString() {
        final DockerSecrets secrets = DockerSecrets.fromString("src=Username, target=Foo, uid=uid,gid=gid, mode=640");

        assertNotNull(secrets);
        assertThat(secrets, hasSize(1));
        assertThat(secrets.get(0).name(), is("Username"));
        assertThat(secrets.get(0).file(), is("Foo"));
        assertThat(secrets.get(0).uid(), is("uid"));
        assertThat(secrets.get(0).gid(), is("gid"));
        // "640" with no leading zero must still be parsed as octal.
        assertThat(secrets.get(0).mode(), is(0640L));
    }

    @Test
    public void shouldSkipEmptyLine() {
        final DockerSecrets secrets = DockerSecrets.fromString("src=Username, target=Foo, uid=UID\n\nsrc=Password, target=Bar");

        assertNotNull(secrets);
        assertThat(secrets, hasSize(2));
        assertThat(secrets.get(0).name(), is("Username"));
        assertThat(secrets.get(1).name(), is("Password"));
    }

    @Test
    public void shouldBuildSecretBindFromDockerSecret() {
        final DockerSecrets secrets = DockerSecrets.fromString("src=Username, target=username, uid=uid, gid=gid, mode=0640\nsrc=Password, target=passwd, uid=uid, gid=gid, mode=0640");

        final Secret usernameSecret = mockSecret("Username", "username-secret-id");
        final Secret passwordSecret = mockSecret("Password", "password-secret-id");

        final List<SecretBind> secretBinds = secrets.toSecretBind(asList(usernameSecret, passwordSecret));

        assertThat(secretBinds, hasSize(2));
        assertSecretBind(secretBinds.get(0), "Username", "username-secret-id", "username", "uid", "gid", 0640L);
        assertSecretBind(secretBinds.get(1), "Password", "password-secret-id", "passwd", "uid", "gid", 0640L);
    }

    @Test
    public void shouldBuildSecretBindFromDockerSecretAndUseDefaultsWhenNotProvided() {
        final DockerSecrets secrets = DockerSecrets.fromString("src=Username");
        final Secret secret = mockSecret("Username", "secret-id");

        final List<SecretBind> secretBinds = secrets.toSecretBind(asList(secret));

        assertThat(secretBinds, hasSize(1));
        // Defaults: file name = secret name, uid/gid = 0, mode = 0444.
        assertSecretBind(secretBinds.get(0), "Username", "secret-id", "Username", "0", "0", 0444L);
    }

    @Test
    public void shouldErrorOutWhenSecretDoesNotExist() {
        final DockerSecrets secrets = DockerSecrets.fromString("src=Username\nsrc=Password");
        final Secret secret = mockSecret("Username", "secret-id");

        thrown.expect(RuntimeException.class);
        thrown.expectMessage("Secret with name `Password` does not exist.");

        secrets.toSecretBind(asList(secret));
    }

    @Test
    public void shouldErrorOutWhenSecretNameIsNotProvided() {
        thrown.expect(RuntimeException.class);
        thrown.expectMessage("Invalid secret specification `target=Username`. Must specify property `src` with value.");

        DockerSecrets.fromString("target=Username");
    }

    @Test
    public void shouldErrorOutWhenModeIsInvalid() {
        thrown.expect(RuntimeException.class);
        thrown.expectMessage("Invalid mode value `0898` for secret `Username`. Mode value must be provided in octal.");

        DockerSecrets.fromString("src=Username, mode=0898").get(0).mode();
    }

    /** Builds a mock swarm {@link Secret} with the given spec name and id. */
    private Secret mockSecret(String name, String id) {
        final Secret secret = mock(Secret.class);
        when(secret.secretSpec()).thenReturn(SecretSpec.builder().name(name).build());
        when(secret.id()).thenReturn(id);
        return secret;
    }

    /** Asserts every property of a single {@link SecretBind}. */
    private void assertSecretBind(SecretBind bind, String secretName, String secretId,
                                  String fileName, String uid, String gid, long mode) {
        assertThat(bind.secretName(), is(secretName));
        assertThat(bind.secretId(), is(secretId));
        assertThat(bind.file().name(), is(fileName));
        assertThat(bind.file().uid(), is(uid));
        assertThat(bind.file().gid(), is(gid));
        assertThat(bind.file().mode(), is(mode));
    }
}
apache-2.0
liaotmaster/hello-myfirst
Test.java
18
第一次提交，么么哒
apache-2.0
forestoden6/LocationAndroidApp
app/src/androidTest/java/com/forestoden/locationservices/ExampleInstrumentedTest.java
766
package com.forestoden.locationservices; import android.content.Context; import android.support.test.InstrumentationRegistry; import android.support.test.runner.AndroidJUnit4; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.*; /** * Instrumentation test, which will execute on an Android device. * * @see <a href="http://d.android.com/tools/testing">Testing documentation</a> */ @RunWith(AndroidJUnit4.class) public class ExampleInstrumentedTest { @Test public void useAppContext() throws Exception { // Context of the app under test. Context appContext = InstrumentationRegistry.getTargetContext(); assertEquals("com.forestoden.locationservices", appContext.getPackageName()); } }
apache-2.0
jecelyin/920-text-editor-v2
app/src/main/java/com/jecelyin/editor/v2/adapter/RangeAdapter.java
3473
/*
 * Copyright (C) 2016 Jecelyin Peng <jecelyin@gmail.com>
 *
 * This file is part of 920 Text Editor.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.jecelyin.editor.v2.adapter;

import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import com.jecelyin.editor.v2.R;

import java.util.HashMap;

/**
 * Adapter presenting the inclusive integer range [minValue, maxValue] as a list,
 * optionally formatting each value with a {@link String#format} pattern.
 *
 * @author Jecelyin Peng <jecelyin@gmail.com>
 */
public class RangeAdapter extends PreferenceAdapter<RangeAdapter.RangeViewHolder> {
    protected final int minValue;
    protected final int maxValue;
    private final CharSequence[] items;   // display labels, one per value
    private final CharSequence[] values;  // raw values as strings, parallel to items

    /**
     * @param min    first value in the range (inclusive)
     * @param max    last value in the range (inclusive)
     * @param format optional {@link String#format} pattern applied to each value;
     *               when null the plain number is shown
     */
    public RangeAdapter(int min, int max, String format) {
        this.minValue = min;
        this.maxValue = max;
        // NOTE(review): computed locally instead of via getItemCount()/getValue() so that
        // no overridable method runs from the constructor (subclass state is not yet
        // initialized at this point). The result is identical for this class.
        int count = max - min + 1;
        items = new String[count];
        values = new String[count];
        for (int i = 0; i < count; i++) {
            int value = min + i;
            values[i] = String.valueOf(value);
            items[i] = format != null ? String.format(format, value) : String.valueOf(value);
        }
    }

    /** @return the formatted display labels, in range order */
    public CharSequence[] getItems() {
        return items;
    }

    /** @return the raw values as strings, parallel to {@link #getItems()} */
    public CharSequence[] getValues() {
        return values;
    }

    /** Maps an adapter position back to the integer value it represents. */
    public int getValue(int position) {
        return minValue + position;
    }

    @Override
    public RangeViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(getLayoutResId(), parent, false);
        final RangeViewHolder vh = new RangeViewHolder(view);
        vh.mTitleTextView = (TextView) view.findViewById(getTextResId());
        // Attach the click listener once per holder and resolve the row lazily.
        // The previous implementation captured the bind-time `position`, which goes
        // stale when the RecyclerView moves or animates items.
        view.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                int position = vh.getAdapterPosition();
                if (position != RecyclerView.NO_POSITION && getOnItemClickListener() != null) {
                    getOnItemClickListener().onItemClick(vh, position);
                }
            }
        });
        return vh;
    }

    @Override
    public void onBindViewHolder(RangeViewHolder holder, int position) {
        holder.mTitleTextView.setText(items[position]);
        setupTextView(holder.mTitleTextView, position);
    }

    @Override
    public long getItemId(int position) {
        // Stable ids are not enabled for this adapter; RecyclerView ignores this value.
        return 0;
    }

    @Override
    public int getItemCount() {
        return maxValue - minValue + 1;
    }

    /** Layout inflated for each row; subclasses may substitute their own. */
    protected int getLayoutResId() {
        return R.layout.md_listitem;
    }

    /** Id of the {@link TextView} inside the row layout that shows the label. */
    protected int getTextResId() {
        return R.id.md_title;
    }

    /** Hook for subclasses to further style the label of a row; no-op by default. */
    protected void setupTextView(TextView tv, int position) {
    }

    public static class RangeViewHolder extends RecyclerView.ViewHolder {
        public TextView mTitleTextView;
        public HashMap<Integer, View> mViewMap = new HashMap<>();

        public RangeViewHolder(View itemView) {
            super(itemView);
        }
    }
}
apache-2.0
datty-io/datty
datty-api/src/main/java/io/datty/spi/DattyDriver.java
3422
/*
 * Copyright (C) 2016 Datty.io Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.datty.spi;

import java.util.List;

import io.datty.api.Datty;
import io.datty.api.DattyBatch;
import io.datty.api.DattyKey;
import io.datty.api.DattyResult;
import io.datty.api.DattySingle;
import io.datty.api.DattyStream;
import io.datty.api.operation.RecordOperation;
import io.datty.api.operation.SetOperation;
import io.datty.api.operation.TypedOperation;
import io.datty.api.result.RecordResult;
import io.datty.api.result.TypedResult;
import io.netty.buffer.ByteBuf;
import rx.Observable;
import rx.Single;

/**
 * DattyDriver - a {@link Datty} facade that simply delegates each call to one of
 * three collaborators: single-operation, batch, and streaming back-ends.
 *
 * @author Alex Shvid
 */
public class DattyDriver implements Datty {

	private final DattySingle single;
	private final DattyBatch batch;
	private final DattyStream stream;

	public DattyDriver(DattySingle single, DattyBatch batch, DattyStream stream) {
		this.single = single;
		this.batch = batch;
		this.stream = stream;
	}

	public static Builder newBuilder() {
		return new Builder();
	}

	// ---- single-operation delegation ----

	@Override
	public Observable<RecordResult> execute(SetOperation operation) {
		return single.execute(operation);
	}

	@Override
	public <O extends TypedOperation<O, R>, R extends TypedResult<O>> Single<R> execute(O operation) {
		return single.execute(operation);
	}

	@Override
	public <O extends TypedOperation<O, R>, R extends TypedResult<O>> Single<R> execute(Single<O> operation) {
		return single.execute(operation);
	}

	// ---- batch delegation ----

	@Override
	public Single<List<DattyResult>> executeBatch(List<RecordOperation> operations) {
		return batch.executeBatch(operations);
	}

	@Override
	public Observable<DattyResult> executeSequence(Observable<RecordOperation> operations) {
		return batch.executeSequence(operations);
	}

	// ---- stream delegation ----

	@Override
	public Observable<ByteBuf> streamOut(DattyKey key) {
		return stream.streamOut(key);
	}

	@Override
	public Single<Long> streamIn(DattyKey key, Observable<ByteBuf> value) {
		return stream.streamIn(key, value);
	}

	/**
	 * Builder for {@link DattyDriver}; all three collaborators are mandatory.
	 */
	public static final class Builder {

		private DattySingle single;
		private DattyBatch batch;
		private DattyStream stream;

		public DattySingle getSingle() {
			return single;
		}

		public Builder setSingle(DattySingle single) {
			this.single = single;
			return this;
		}

		public DattyBatch getBatch() {
			return batch;
		}

		public Builder setBatch(DattyBatch batch) {
			this.batch = batch;
			return this;
		}

		public DattyStream getStream() {
			return stream;
		}

		public Builder setStream(DattyStream stream) {
			this.stream = stream;
			return this;
		}

		public DattyDriver build() {
			// Arguments are evaluated left to right, so the failure order
			// (single, then batch, then stream) matches the original checks.
			return new DattyDriver(
					required(single, "empty single"),
					required(batch, "empty batch"),
					required(stream, "empty stream"));
		}

		/** Rejects a missing component with the driver's conventional message. */
		private static <T> T required(T component, String message) {
			if (component == null) {
				throw new IllegalArgumentException(message);
			}
			return component;
		}
	}
}
apache-2.0
rafaelcoutinho/comendobemdelivery
src/br/copacabana/JsonDeleteCommand.java
323
package br.copacabana; import br.com.copacabana.cb.entities.mgr.Manager; public class JsonDeleteCommand extends PersistCommand { public JsonDeleteCommand(Object entity) { super(entity); } @Override public void execute(Manager manager) throws Exception { manager.delete(entity); } }
apache-2.0
WeTheInternet/collide
client/src/main/java/com/google/collide/client/search/awesomebox/host/AwesomeBoxComponent.java
2188
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.collide.client.search.awesomebox.host;

import elemental.dom.Element;

/**
 * Defines the minimum interface exposed by an {@link AwesomeBoxComponentHost}
 * component. A component is one which is hosted within the AwesomeBox UI.
 * Examples include find/replace, the awesomebox itself, and the checkpoint ui.
 */
public interface AwesomeBoxComponent {
  /**
   * Defines how the component host hides this component.
   */
  public enum HideMode {
    /**
     * The component will autohide when the user clicks outside of the
     * {@link AwesomeBoxComponentHost} or the actual input loses focus.
     */
    AUTOHIDE,
    /** The component must be manually closed or programmatically closed. */
    NO_AUTOHIDE,
  }

  /** Defines what happens to the component's state once it is hidden. */
  public enum HiddenBehavior {
    /** The component will stay active when hidden. */
    STAY_ACTIVE,
    /**
     * When hidden, this current component hosted by the
     * {@link AwesomeBoxComponentHost} will revert to the default component.
     */
    REVERT_TO_DEFAULT
  }

  /** Identifies why the component is being shown. */
  public enum ShowReason {
    /** Indicates the component is being shown due to a click event */
    CLICK,
    /** Indicates the component is being shown programmatically */
    OTHER
  }

  /** How the host should hide this component (see {@link HideMode}). */
  HideMode getHideMode();

  /** What happens to this component's state once hidden (see {@link HiddenBehavior}). */
  HiddenBehavior getHiddenBehavior();

  /** Placeholder text the host displays for this component's input. */
  String getPlaceHolderText();

  /** Tooltip text the host displays for this component. */
  String getTooltipText();

  /** Root DOM element of this component, attached by the host when shown. */
  Element getElement();

  /**
   * Called when the component should steal focus, guaranteed to be called
   * immediately after onShow.
   */
  void focus();

  /**
   * Called when the host shows this component.
   *
   * @param host the hosting container
   * @param reason why the component is being shown
   */
  void onShow(ComponentHost host, ShowReason reason);

  /** Called when the host hides this component. */
  void onHide();
}
apache-2.0
ludovicc/testng-debian
src/main/org/testng/internal/AnnotationDirectoryConverter.java
1552
package org.testng.internal; import java.io.File; import java.util.HashMap; import java.util.Map; /** * Scans the directory and its subdirectories for java files to convert to TestNG format * @author micheb10 12-Sep-2006 * @since 5.3 */ public class AnnotationDirectoryConverter { private File m_sourceDir; private File m_outDir; private Map<File, File> m_fileNames; /** * @param sourceDirectory * @param destinationDirectory */ public AnnotationDirectoryConverter(File sourceDirectory, File destinationDirectory) { m_sourceDir= sourceDirectory; m_outDir= destinationDirectory; } /** * @return */ public int convert() { // // Convert annotations // m_fileNames= convert(m_sourceDir); File[] files= m_fileNames.keySet().toArray(new File[m_fileNames.size()]); AnnotationTestConverter fc= new AnnotationTestConverter(files, m_outDir); int converted= fc.convert(); return converted; } private boolean isTestFile(File f) { return f.getName().endsWith(".java"); } private Map<File, File> convert(File f) { Map<File, File> result= new HashMap<File, File>(); if(f.isDirectory()) { File[] files= f.listFiles(); for(File file : files) { File f2= file.getAbsoluteFile(); Map<File, File> others= convert(f2); result.putAll(others); } } else { if(isTestFile(f)) { result.put(f, f); } } return result; } }
apache-2.0
Zappos/zappos-json
src/main/java/com/zappos/json/util/Reflections.java
4899
/*
 * Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0/
 *
 * or in the "license" file accompanying this file. This file is
 * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
 * OF ANY KIND, either express or implied. See the License for the specific
 * language governing permissions and limitations under the License.
 */
package com.zappos.json.util;

import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.List;

/**
 * Reflection helpers: class presence checks, hierarchical field lookup,
 * annotation lookup on fields and accessor methods, and extraction of
 * generic type arguments from fields and method parameters.
 *
 * @author Hussachai Puripunpinyo
 */
public final class Reflections {

  private Reflections() {
    // utility class - no instances
  }

  /**
   * @param className fully qualified class name
   * @return true when the class is loadable by this class's class loader
   *         (the class is not initialized by the check)
   */
  public static boolean classPresent(String className) {
    ClassLoader thisClassLoader = Reflections.class.getClassLoader();
    try {
      Class.forName(className, false, thisClassLoader);
      return true;
    } catch (ClassNotFoundException e) {
      return false;
    }
  }

  /**
   * Recursively find the field by name up to the top of class hierarchy.
   *
   * @param clazz the class object
   * @param fieldName declared field name of specified class or an ancestor
   * @return the field object, or {@code null} when no class in the hierarchy
   *         declares it
   * @throws NoSuchFieldException never thrown in practice (kept for source
   *         compatibility with existing callers that catch it)
   */
  public static Field getField(Class<?> clazz, String fieldName)
      throws NoSuchFieldException {
    // Guard against null: interfaces (and Object) have a null superclass, so the
    // previous version could NPE when recursing past an interface instead of
    // reporting "not found".
    if (clazz == null || clazz == Object.class) {
      return null;
    }
    try {
      return clazz.getDeclaredField(fieldName);
    } catch (NoSuchFieldException e) {
      return getField(clazz.getSuperclass(), fieldName);
    }
  }

  /** @return true when the (possibly null) field carries the annotation */
  public static boolean hasAnnotation(Field field,
      Class<? extends Annotation> annotationClass) {
    if (field != null) {
      return field.isAnnotationPresent(annotationClass);
    }
    return false;
  }

  /** @return true when the (possibly null) method carries the annotation */
  public static boolean hasAnnotation(Method method,
      Class<? extends Annotation> annotationClass) {
    if (method != null) {
      return method.isAnnotationPresent(annotationClass);
    }
    return false;
  }

  /** @return true when either the field or the method carries the annotation */
  public static boolean hasAnnotation(Method method, Field field,
      Class<? extends Annotation> annotationClass) {
    return hasAnnotation(field, annotationClass)
        || hasAnnotation(method, annotationClass);
  }

  /**
   * Looks the annotation up on the field first, then on the method.
   *
   * @return the annotation instance, or null when neither carries it
   */
  public static <T extends Annotation> T getAnnotation(Method method, Field field,
      Class<T> annotationClass) {
    T annot = null;
    if (field != null) {
      annot = field.getAnnotation(annotationClass);
    }
    if (annot == null && method != null) {
      annot = method.getAnnotation(annotationClass);
    }
    return annot;
  }

  /** First generic type argument of the method's first parameterized parameter, or null. */
  public static Class<?> getFirstGenericParameterType(Method method) {
    Class<?> types[] = getGenericParameterTypes(method);
    if (types != null && types.length > 0) {
      return types[0];
    }
    return null;
  }

  /** Second generic type argument of the method's first parameterized parameter, or null. */
  public static Class<?> getSecondGenericParameterType(Method method) {
    Class<?> types[] = getGenericParameterTypes(method);
    if (types != null && types.length > 1) {
      return types[1];
    }
    return null;
  }

  /**
   * Generic type arguments (those that are concrete classes) of the method's
   * first parameterized parameter, or null when no parameter is parameterized.
   */
  public static Class<?>[] getGenericParameterTypes(Method method) {
    for (Type argType : method.getGenericParameterTypes()) {
      Class<?>[] types = extractClassTypeArguments(argType);
      if (types != null) {
        return types;
      }
    }
    return null;
  }

  /** First generic type argument of the field's declared type, or null. */
  public static Class<?> getFirstGenericType(Field field) {
    Class<?> types[] = getGenericTypes(field);
    if (types != null && types.length > 0) {
      return types[0];
    }
    return null;
  }

  /** Second generic type argument of the field's declared type, or null. */
  public static Class<?> getSecondGenericType(Field field) {
    Class<?> types[] = getGenericTypes(field);
    if (types != null && types.length > 1) {
      return types[1];
    }
    return null;
  }

  /**
   * Generic type arguments (those that are concrete classes) of the field's
   * declared type, or null when the type is not parameterized.
   */
  public static Class<?>[] getGenericTypes(Field field) {
    return extractClassTypeArguments(field.getGenericType());
  }

  /**
   * Shared extraction used by the field and method variants above: returns the
   * Class-valued actual type arguments of a parameterized type, or null when
   * the type is not parameterized (or has no type arguments). Non-Class
   * arguments (wildcards, variables) are silently skipped, matching the
   * original behavior.
   */
  private static Class<?>[] extractClassTypeArguments(Type type) {
    if (type instanceof ParameterizedType) {
      Type typeArgs[] = ((ParameterizedType) type).getActualTypeArguments();
      if (typeArgs.length > 0) {
        List<Class<?>> genericTypes = new ArrayList<>();
        for (Type typeArg : typeArgs) {
          if (typeArg instanceof Class<?>) {
            genericTypes.add((Class<?>) typeArg);
          }
        }
        return genericTypes.toArray(new Class[0]);
      }
    }
    return null;
  }
}
apache-2.0
tfredrich/ACL-Aid
src/main/java/com/strategicgains/aclaid/impl/PermissionImpl.java
1102
/* Copyright 2016, Strategic Gains, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.strategicgains.aclaid.impl; import com.strategicgains.aclaid.Permission; /** * @author toddf * @since Feb 24, 2016 */ public class PermissionImpl implements Permission { private String permissionId; public PermissionImpl() { super(); } public PermissionImpl(String permissionId) { this(); setPermissionId(permissionId); } @Override public String getPermissionId() { return permissionId; } public void setPermissionId(String permissionId) { this.permissionId = permissionId; } }
apache-2.0
bencaldwell/opc-ua-stack
stack-core/src/main/java/com/digitalpetri/opcua/stack/core/serialization/DataTypeEncoding.java
872
package com.digitalpetri.opcua.stack.core.serialization; import com.digitalpetri.opcua.stack.core.UaSerializationException; import com.digitalpetri.opcua.stack.core.types.builtin.ByteString; import com.digitalpetri.opcua.stack.core.types.builtin.NodeId; import com.digitalpetri.opcua.stack.core.types.builtin.XmlElement; public interface DataTypeEncoding { public static final DataTypeEncoding OPC_UA = new OpcUaDataTypeEncoding(); ByteString encodeToByteString(Object object, NodeId encodingTypeId) throws UaSerializationException; Object decodeFromByteString(ByteString encoded, NodeId encodingTypeId) throws UaSerializationException; XmlElement encodeToXmlElement(Object object, NodeId encodingTypeId) throws UaSerializationException; Object decodeFromXmlElement(XmlElement encoded, NodeId encodingTypeId) throws UaSerializationException; }
apache-2.0