repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
srijeyanthan/hops | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/MemoryTimelineStore.java | 13077 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.PriorityQueue;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
/**
 * In-memory implementation of {@link TimelineStore}. This
 * implementation is for test purpose only. If users improperly instantiate it,
 * they may encounter reading and writing history data in different memory
 * store.
 */
@Private
@Unstable
public class MemoryTimelineStore extends AbstractService
    implements TimelineStore {

  // Entity storage, keyed by (entityId, entityType). NOTE(review): plain
  // HashMaps with no synchronization -- assumes single-threaded access,
  // consistent with the "test purpose only" contract above.
  private Map<EntityIdentifier, TimelineEntity> entities =
      new HashMap<EntityIdentifier, TimelineEntity>();

  // Wall-clock insertion time per entity; drives the fromTs filter in
  // getEntities().
  private Map<EntityIdentifier, Long> entityInsertTimes =
      new HashMap<EntityIdentifier, Long>();

  public MemoryTimelineStore() {
    super(MemoryTimelineStore.class.getName());
  }

  /**
   * Scans all stored entities of the given type and returns, in sorted order,
   * up to {@code limit} entities that pass every supplied filter, each masked
   * down to the requested fields.
   *
   * @param entityType the entity type to match (required)
   * @param limit max number of entities to return; null means DEFAULT_LIMIT
   * @param windowStart exclusive lower bound on start time; null means none
   * @param windowEnd inclusive upper bound on start time; null means none
   * @param fromId paging cursor: start at this entity id (inclusive)
   * @param fromTs only entities inserted at or before this timestamp
   * @param primaryFilter a primary filter the entity must carry, or null
   * @param secondaryFilters filters combined with AND logic, or null
   * @param fields fields to expose; null means all fields
   * @return the matching entities wrapped in a {@link TimelineEntities}
   */
  @Override
  public TimelineEntities getEntities(String entityType, Long limit,
      Long windowStart, Long windowEnd, String fromId, Long fromTs,
      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters,
      EnumSet<Field> fields) {
    // Normalize absent parameters to "no restriction" defaults.
    if (limit == null) {
      limit = DEFAULT_LIMIT;
    }
    if (windowStart == null) {
      windowStart = Long.MIN_VALUE;
    }
    if (windowEnd == null) {
      windowEnd = Long.MAX_VALUE;
    }
    if (fields == null) {
      fields = EnumSet.allOf(Field.class);
    }
    Iterator<TimelineEntity> entityIterator = null;
    if (fromId != null) {
      // Paging: begin iteration at the entity identified by fromId; the
      // entity itself is included via tailSet(..., true). An unknown fromId
      // yields an empty result.
      TimelineEntity firstEntity =
          entities.get(new EntityIdentifier(fromId, entityType));
      if (firstEntity == null) {
        return new TimelineEntities();
      } else {
        entityIterator = new TreeSet<TimelineEntity>(entities.values())
            .tailSet(firstEntity, true).iterator();
      }
    }
    if (entityIterator == null) {
      // NOTE(review): a PriorityQueue's iterator does not visit elements in
      // priority order; correctness here relies on the final sort below.
      entityIterator =
          new PriorityQueue<TimelineEntity>(entities.values()).iterator();
    }
    List<TimelineEntity> entitiesSelected = new ArrayList<TimelineEntity>();
    while (entityIterator.hasNext()) {
      TimelineEntity entity = entityIterator.next();
      if (entitiesSelected.size() >= limit) {
        break;
      }
      if (!entity.getEntityType().equals(entityType)) {
        continue;
      }
      // Start time must fall inside the window (windowStart, windowEnd].
      if (entity.getStartTime() <= windowStart) {
        continue;
      }
      if (entity.getStartTime() > windowEnd) {
        continue;
      }
      // Skip entities inserted after the fromTs cutoff.
      if (fromTs != null && entityInsertTimes.get(
          new EntityIdentifier(entity.getEntityId(), entity.getEntityType())) >
          fromTs) {
        continue;
      }
      if (primaryFilter != null &&
          !matchPrimaryFilter(entity.getPrimaryFilters(), primaryFilter)) {
        continue;
      }
      if (secondaryFilters != null) { // AND logic
        boolean flag = true;
        for (NameValuePair secondaryFilter : secondaryFilters) {
          // A secondary filter is satisfied by either a matching primary
          // filter or a matching otherInfo entry.
          if (secondaryFilter != null &&
              !matchPrimaryFilter(entity.getPrimaryFilters(),
                  secondaryFilter) &&
              !matchFilter(entity.getOtherInfo(), secondaryFilter)) {
            flag = false;
            break;
          }
        }
        if (!flag) {
          continue;
        }
      }
      entitiesSelected.add(entity);
    }
    // Mask out the fields the caller did not ask for, then sort the result.
    List<TimelineEntity> entitiesToReturn = new ArrayList<TimelineEntity>();
    for (TimelineEntity entitySelected : entitiesSelected) {
      entitiesToReturn.add(maskFields(entitySelected, fields));
    }
    Collections.sort(entitiesToReturn);
    TimelineEntities entitiesWrapper = new TimelineEntities();
    entitiesWrapper.setEntities(entitiesToReturn);
    return entitiesWrapper;
  }

  /**
   * Looks up a single entity by id and type.
   *
   * @param entityId the entity id
   * @param entityType the entity type
   * @param fieldsToRetrieve fields to expose; null means all fields
   * @return the masked entity, or null if it is not stored
   */
  @Override
  public TimelineEntity getEntity(String entityId, String entityType,
      EnumSet<Field> fieldsToRetrieve) {
    if (fieldsToRetrieve == null) {
      fieldsToRetrieve = EnumSet.allOf(Field.class);
    }
    TimelineEntity entity =
        entities.get(new EntityIdentifier(entityId, entityType));
    if (entity == null) {
      return null;
    } else {
      return maskFields(entity, fieldsToRetrieve);
    }
  }

  /**
   * Collects, per requested entity id, up to {@code limit} events whose
   * timestamps fall inside (windowStart, windowEnd] and whose type is in
   * {@code eventTypes} (if given). Unknown entity ids are silently skipped.
   *
   * @param entityType the entity type shared by all requested ids
   * @param entityIds the entity ids to fetch events for; null yields empty
   * @param limit max events per entity; null means DEFAULT_LIMIT
   * @param windowStart exclusive lower bound on event time; null means none
   * @param windowEnd inclusive upper bound on event time; null means none
   * @param eventTypes event types to include; null means all
   * @return the events grouped per entity
   */
  @Override
  public TimelineEvents getEntityTimelines(String entityType,
      SortedSet<String> entityIds, Long limit, Long windowStart, Long windowEnd,
      Set<String> eventTypes) {
    TimelineEvents allEvents = new TimelineEvents();
    if (entityIds == null) {
      return allEvents;
    }
    if (limit == null) {
      limit = DEFAULT_LIMIT;
    }
    if (windowStart == null) {
      windowStart = Long.MIN_VALUE;
    }
    if (windowEnd == null) {
      windowEnd = Long.MAX_VALUE;
    }
    for (String entityId : entityIds) {
      EntityIdentifier entityID = new EntityIdentifier(entityId, entityType);
      TimelineEntity entity = entities.get(entityID);
      if (entity == null) {
        continue;
      }
      EventsOfOneEntity events = new EventsOfOneEntity();
      events.setEntityId(entityId);
      events.setEntityType(entityType);
      for (TimelineEvent event : entity.getEvents()) {
        if (events.getEvents().size() >= limit) {
          break;
        }
        if (event.getTimestamp() <= windowStart) {
          continue;
        }
        if (event.getTimestamp() > windowEnd) {
          continue;
        }
        if (eventTypes != null && !eventTypes.contains(event.getEventType())) {
          continue;
        }
        events.addEvent(event);
      }
      allEvents.addEvent(events);
    }
    return allEvents;
  }

  /**
   * Stores (or merges into) entities in memory: events, primary filters and
   * otherInfo are merged into any existing entity with the same identifier,
   * start times are derived from events when absent, and related entities are
   * created/linked back to this one. Entities that end up with no start time
   * are rejected with a {@link TimelinePutError#NO_START_TIME} error.
   *
   * @param data the entities to store
   * @return a response carrying one error per rejected entity
   */
  @Override
  public TimelinePutResponse put(TimelineEntities data) {
    TimelinePutResponse response = new TimelinePutResponse();
    for (TimelineEntity entity : data.getEntities()) {
      EntityIdentifier entityId =
          new EntityIdentifier(entity.getEntityId(), entity.getEntityType());
      // store entity info in memory
      TimelineEntity existingEntity = entities.get(entityId);
      if (existingEntity == null) {
        existingEntity = new TimelineEntity();
        existingEntity.setEntityId(entity.getEntityId());
        existingEntity.setEntityType(entity.getEntityType());
        existingEntity.setStartTime(entity.getStartTime());
        entities.put(entityId, existingEntity);
        entityInsertTimes.put(entityId, System.currentTimeMillis());
      }
      if (entity.getEvents() != null) {
        if (existingEntity.getEvents() == null) {
          existingEntity.setEvents(entity.getEvents());
        } else {
          existingEntity.addEvents(entity.getEvents());
        }
        // Keep events ordered after each merge.
        Collections.sort(existingEntity.getEvents());
      }
      // check startTime
      if (existingEntity.getStartTime() == null) {
        if (existingEntity.getEvents() == null ||
            existingEntity.getEvents().isEmpty()) {
          // No explicit start time and no events to derive one from:
          // reject and roll back the insertion.
          TimelinePutError error = new TimelinePutError();
          error.setEntityId(entityId.getId());
          error.setEntityType(entityId.getType());
          error.setErrorCode(TimelinePutError.NO_START_TIME);
          response.addError(error);
          entities.remove(entityId);
          entityInsertTimes.remove(entityId);
          continue;
        } else {
          // Derive the start time as the earliest event timestamp.
          // NOTE(review): this scans the *incoming* entity's events, not the
          // merged list; with a null existing start time both are the same
          // set, since the existing entity had no events of its own.
          Long min = Long.MAX_VALUE;
          for (TimelineEvent e : entity.getEvents()) {
            if (min > e.getTimestamp()) {
              min = e.getTimestamp();
            }
          }
          existingEntity.setStartTime(min);
        }
      }
      if (entity.getPrimaryFilters() != null) {
        if (existingEntity.getPrimaryFilters() == null) {
          existingEntity.setPrimaryFilters(new HashMap<String, Set<Object>>());
        }
        for (Entry<String, Set<Object>> pf : entity.getPrimaryFilters()
            .entrySet()) {
          for (Object pfo : pf.getValue()) {
            existingEntity.addPrimaryFilter(pf.getKey(), maybeConvert(pfo));
          }
        }
      }
      if (entity.getOtherInfo() != null) {
        if (existingEntity.getOtherInfo() == null) {
          existingEntity.setOtherInfo(new HashMap<String, Object>());
        }
        for (Entry<String, Object> info : entity.getOtherInfo().entrySet()) {
          existingEntity
              .addOtherInfo(info.getKey(), maybeConvert(info.getValue()));
        }
      }
      // relate it to other entities
      if (entity.getRelatedEntities() == null) {
        continue;
      }
      for (Map.Entry<String, Set<String>> partRelatedEntities : entity
          .getRelatedEntities().entrySet()) {
        if (partRelatedEntities == null) {
          continue;
        }
        for (String idStr : partRelatedEntities.getValue()) {
          EntityIdentifier relatedEntityId =
              new EntityIdentifier(idStr, partRelatedEntities.getKey());
          TimelineEntity relatedEntity = entities.get(relatedEntityId);
          if (relatedEntity != null) {
            // Existing related entity: just add the back-reference.
            relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
                existingEntity.getEntityId());
          } else {
            // Auto-create a stub for the related entity, inheriting this
            // entity's start time.
            relatedEntity = new TimelineEntity();
            relatedEntity.setEntityId(relatedEntityId.getId());
            relatedEntity.setEntityType(relatedEntityId.getType());
            relatedEntity.setStartTime(existingEntity.getStartTime());
            relatedEntity.addRelatedEntity(existingEntity.getEntityType(),
                existingEntity.getEntityId());
            entities.put(relatedEntityId, relatedEntity);
            entityInsertTimes.put(relatedEntityId, System.currentTimeMillis());
          }
        }
      }
    }
    return response;
  }

  /**
   * Returns a copy of the entity exposing only the requested fields; id,
   * type and start time are always exposed.
   *
   * @param entity the stored entity (not modified)
   * @param fields the fields to expose
   * @return a masked copy of the entity
   */
  private static TimelineEntity maskFields(TimelineEntity entity,
      EnumSet<Field> fields) {
    // Conceal the fields that are not going to be exposed
    TimelineEntity entityToReturn = new TimelineEntity();
    entityToReturn.setEntityId(entity.getEntityId());
    entityToReturn.setEntityType(entity.getEntityType());
    entityToReturn.setStartTime(entity.getStartTime());
    // BUG FIX: the previous code unconditionally evaluated
    // entity.getEvents().get(0) for LAST_EVENT_ONLY, which threw a
    // NullPointerException / IndexOutOfBoundsException for an entity with a
    // null or empty event list. Guard against both and fall back to null.
    List<TimelineEvent> events = entity.getEvents();
    if (fields.contains(Field.EVENTS)) {
      entityToReturn.setEvents(events);
    } else if (fields.contains(Field.LAST_EVENT_ONLY)
        && events != null && !events.isEmpty()) {
      entityToReturn.setEvents(Arrays.asList(events.get(0)));
    } else {
      entityToReturn.setEvents(null);
    }
    entityToReturn.setRelatedEntities(
        fields.contains(Field.RELATED_ENTITIES) ? entity.getRelatedEntities() :
            null);
    entityToReturn.setPrimaryFilters(
        fields.contains(Field.PRIMARY_FILTERS) ? entity.getPrimaryFilters() :
            null);
    entityToReturn.setOtherInfo(
        fields.contains(Field.OTHER_INFO) ? entity.getOtherInfo() : null);
    return entityToReturn;
  }

  /**
   * Returns true iff {@code tags} contains the filter's name mapped to a
   * value equal to the filter's value.
   */
  private static boolean matchFilter(Map<String, Object> tags,
      NameValuePair filter) {
    Object value = tags.get(filter.getName());
    if (value == null) { // doesn't have the filter
      return false;
    } else if (!value.equals(filter.getValue())) { // doesn't match the filter
      return false;
    }
    return true;
  }

  /**
   * Returns true iff the primary-filter map contains the filter's name and
   * its value set contains the filter's value.
   */
  private static boolean matchPrimaryFilter(Map<String, Set<Object>> tags,
      NameValuePair filter) {
    Set<Object> value = tags.get(filter.getName());
    if (value == null) { // doesn't have the filter
      return false;
    } else {
      return value.contains(filter.getValue());
    }
  }

  /**
   * Narrows an in-Integer-range Long to an Integer before storing it,
   * leaving every other value untouched. Presumably this mirrors how JSON
   * deserialization represents small integral values, so that stored and
   * queried filter values compare equal -- TODO(review) confirm.
   */
  private static Object maybeConvert(Object o) {
    if (o instanceof Long) {
      Long l = (Long) o;
      if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
        return l.intValue();
      }
    }
    return o;
  }
}
| apache-2.0 |
muhd7rosli/desmoj | src/desmoj/extensions/applicationDomains/production/RestockProcessQT.java | 17583 | package desmoj.extensions.applicationDomains.production;
import desmoj.core.advancedModellingFeatures.Stock;
import desmoj.core.dist.NumericalDist;
import desmoj.core.simulator.Model;
import desmoj.core.simulator.SimProcess;
import desmoj.core.simulator.SimTime;
import desmoj.core.simulator.TimeSpan;
/**
 * RestockProcessQT is a process restocking a <code>Stock</code> with a fixed
 * quantity (Q) of units on a periodic review bases (fixed Time span = T).
 *
 * @version DESMO-J, Ver. 2.4.1 copyright (c) 2014
 * @author Soenke Claassen
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 */
public class RestockProcessQT extends SimProcess {

    /**
     * The fixed quantity of units the <code>Stock</code> will be replenished
     * with.
     */
    private long orderQuantity;

    /**
     * The fixed time span after which the inventory will be reviewed and orders
     * will be placed.
     */
    private TimeSpan reviewSpan;

    /**
     * The client <code>Stock</code> which will be supplied by this
     * <code>RestockProcess</code>.
     */
    private Stock clientStock;

    /**
     * The random distribution stream determining the lead time. That is the
     * time between the placement and receipt of an order. If <code>null</code>
     * lead time is zero.
     */
    private NumericalDist<?> leadTime;

    /**
     * Constructs a <code>RestockProcessQT</code> which restocks a client
     * <code>Stock</code> after a fixed time period with a fixed quantity of
     * units. The lead time (time gap between placement and receipt of an order)
     * will be given as a real random number distribution.
     *
     * @param owner
     *            Model : The model this <code>RestockProcessQT</code> is
     *            associated to
     * @param name
     *            java.lang.String : The name of the
     *            <code>RestockProcessQT</code>
     * @param q
     *            long : The quantity supplied to the Stock with every order.
     * @param t
     *            TimeSpan : The time period between the inventory reviews (in
     *            this case also the placements of the orders).
     * @param client
     *            Stock : The <code>Stock</code> which will be replenished.
     * @param lt
     *            NumericalDist&lt;?&gt; : The lead time random number
     *            distribution to determine the time between placement and
     *            receipt of an order. If <code>null</code> the lead time is
     *            zero.
     * @param showInTrace
     *            boolean : Flag for showing the <code>RestockProcessQT</code>
     *            in trace-files. Set it to <code>true</code> if
     *            RestockProcessQT should show up in trace. Set it to
     *            <code>false</code> if RestockProcessQT should not be shown
     *            in trace.
     */
    public RestockProcessQT(Model owner, String name, long q, TimeSpan t,
            Stock client, NumericalDist<?> lt, boolean showInTrace) {
        super(owner, name, true, showInTrace); // make a sim-process
        this.orderQuantity = q;
        this.reviewSpan = t;
        this.clientStock = client;
        this.leadTime = lt;

        // check the quantity parameter; invalid values are repaired with a
        // warning rather than rejected, so the simulation can keep running
        if (q <= 0) {
            sendWarning(
                    "The given quantity parameter is negative or zero!"
                            + "The quantity will be set to one!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, SimTime t, Stock client, RealDist lt, boolean "
                            + "showInTrace)",
                    "A negative or zero quantity for an order does not make "
                            + "sense.",
                    "Make sure to provide a valid positive integer number "
                            + "for the quantity of every order.");

            // set the quantity to at least one
            this.orderQuantity = 1; // better than nothing
        }

        // check the review period parameter
        if (t == null) {
            sendWarning(
                    "The given review period parameter is only a null pointer!"
                            + "The review period will be set to 100!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, SimTime t, Stock client, RealDist lt, boolean "
                            + "showInTrace)",
                    "A non existing review period does not make sense.",
                    "Make sure to provide a valid SimTime object as review "
                            + "period.");

            // set the review period to 100 (or 42 ?!?)
            this.reviewSpan = new TimeSpan(100); // better than nothing
        }

        // check the client Stock parameter; a null client is only warned
        // about -- lifeCycle() would then fail with an NPE on first use
        if (client == null) {
            sendWarning(
                    "The given client parameter is only a null pointer!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, TimeSpan t, Stock client, RealDist lt, boolean "
                            + "showInTrace)",
                    "The RestockProcessQT does not know which Stock to replenish "
                            + "and therefore is useless.",
                    "Make sure to provide a valid Stock object which should "
                            + "be replenished by this RestockProcessQT.");
        }
    }

    /**
     * Constructs a <code>RestockProcessQT</code> which restocks a client
     * <code>Stock</code> after a fixed time period with a fixed quantity of
     * units. The lead time (time gap between placement and receipt of an order)
     * will be given as a real random number distribution.
     *
     * @param owner
     *            Model : The model this <code>RestockProcessQT</code> is
     *            associated to
     * @param name
     *            java.lang.String : The name of the
     *            <code>RestockProcessQT</code>
     * @param q
     *            long : The quantity supplied to the Stock with every order.
     * @param t
     *            SimTime : The time period between the inventory reviews (in
     *            this case also the placements of the orders).
     * @param client
     *            Stock : The <code>Stock</code> which will be replenished.
     * @param lt
     *            NumericalDist&lt;Double&gt; : The lead time random number
     *            distribution to determine the time between placement and
     *            receipt of an order. If <code>null</code> the lead time is
     *            zero.
     * @param showInTrace
     *            boolean : Flag for showing the <code>RestockProcessQT</code>
     *            in trace-files. Set it to <code>true</code> if
     *            RestockProcessQT should show up in trace. Set it to
     *            <code>false</code> if RestockProcessQT should not be shown
     *            in trace.
     *
     * @deprecated Type of t (SimTime) to be replaced with TimeInstant.
     */
    public RestockProcessQT(Model owner, String name, long q, SimTime t,
            Stock client, NumericalDist<Double> lt, boolean showInTrace) {
        // delegate to the TimeSpan-based constructor
        this(owner, name, q, SimTime.toTimeSpan(t), client, lt, showInTrace);
    }

    /**
     * Constructs a <code>RestockProcessQT</code> which restocks a client
     * <code>Stock</code> after a fixed time period with a fixed quantity of
     * units. The lead time is zero.
     *
     * @param owner
     *            Model : The model this <code>RestockProcessQT</code> is
     *            associated to
     * @param name
     *            java.lang.String : The name of the
     *            <code>RestockProcessQT</code>
     * @param q
     *            long : The quantity supplied to the Stock with every order.
     * @param t
     *            TimeSpan : The time period between the inventory reviews (in
     *            this case also the placements of the orders).
     * @param client
     *            Stock : The <code>Stock</code> which will be replenished.
     * @param showInTrace
     *            boolean : Flag for showing the <code>RestockProcessQT</code>
     *            in trace-files. Set it to <code>true</code> if
     *            RestockProcessQT should show up in trace. Set it to
     *            <code>false</code> if RestockProcessQT should not be shown
     *            in trace.
     */
    public RestockProcessQT(Model owner, String name, long q, TimeSpan t,
            Stock client, boolean showInTrace) {
        super(owner, name, true, showInTrace); // make a sim-process
        this.orderQuantity = q;
        this.reviewSpan = t;
        this.clientStock = client;
        this.leadTime = null;

        // check the quantity parameter (same repair policy as above)
        if (q <= 0) {
            sendWarning(
                    "The given quantity parameter is negative or zero!"
                            + "The quantity will be set to one!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, TimeSpan t, Stock client, boolean showInTrace)",
                    "A negative or zero quantity for an order does not make "
                            + "sense.",
                    "Make sure to provide a valid positive integer number "
                            + "for the quantity of every order.");

            // set the quantity to at least one
            this.orderQuantity = 1; // better than nothing
        }

        // check the review period parameter
        if (t == null) {
            sendWarning(
                    "The given review period parameter is only a null pointer!"
                            + "The review period will be set to 100!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, TimeSpan t, Stock client, boolean showInTrace)",
                    "A non existing review period does not make sense.",
                    "Make sure to provide a valid TimeSpan object as review "
                            + "period.");

            // set the review period to 100 (or 42 ?!?)
            this.reviewSpan = new TimeSpan(100); // better than nothing
        }

        // check the client Stock parameter
        if (client == null) {
            sendWarning(
                    "The given client parameter is only a null pointer!",
                    "RestockProcessQT : "
                            + getName()
                            + " Constructor: RestockProcessQT(Model owner, String name, "
                            + "long q, TimeSpan t, Stock client, boolean showInTrace)",
                    "The RestockProcessQT does not know which Stock to replenish "
                            + "and therefore is useless.",
                    "Make sure to provide a valid Stock object which should "
                            + "be replenished by this RestockProcessQT.");
        }
    }

    /**
     * Constructs a <code>RestockProcessQT</code> which restocks a client
     * <code>Stock</code> after a fixed time period with a fixed quantity of
     * units. The lead time is zero.
     *
     * @param owner
     *            Model : The model this <code>RestockProcessQT</code> is
     *            associated to
     * @param name
     *            java.lang.String : The name of the
     *            <code>RestockProcessQT</code>
     * @param q
     *            long : The quantity supplied to the Stock with every order.
     * @param t
     *            SimTime : The time period between the inventory reviews (in
     *            this case also the placements of the orders).
     * @param client
     *            Stock : The <code>Stock</code> which will be replenished.
     * @param showInTrace
     *            boolean : Flag for showing the <code>RestockProcessQT</code>
     *            in trace-files. Set it to <code>true</code> if
     *            RestockProcessQT should show up in trace. Set it to
     *            <code>false</code> if RestockProcessQT should not be shown
     *            in trace.
     *
     * @deprecated Type of t (SimTime) to be replaced with TimeInstant.
     *
     */
    public RestockProcessQT(Model owner, String name, long q, SimTime t,
            Stock client, boolean showInTrace) {
        // delegate to the TimeSpan-based constructor
        this(owner, name, q, SimTime.toTimeSpan(t), client, showInTrace);
    }

    /**
     * Returns the random number distribution for the lead time (time between
     * placement and receipt of an order).
     *
     * @return NumericalDist&lt;?&gt; : The random number distribution for the
     *         lead time (time between placement and receipt of an order).
     *         <code>null</code> means the lead time is zero.
     */
    public NumericalDist<?> getLeadTime() {
        return leadTime;
    }

    /**
     * Returns the quantity (number of units) to be stored in the Stock.
     *
     * @return long : The Stock will be replenished with this number of units.
     */
    public long getOrderQuantity() {
        return orderQuantity;
    }

    /**
     * Returns the time (as a SimTime object) between every replenishment of the
     * Stock.
     *
     * @return SimTime : The time (as a SimTime object) between every
     *         replenishment of the Stock.
     *
     * @deprecated Replaced by getReviewSpan().
     */
    public SimTime getReviewPeriod() {
        return SimTime.toSimTime(reviewSpan);
    }

    /**
     * Returns the time span between every replenishment of the
     * Stock.
     *
     * @return TimeSpan : The time (as a TimeSpan object) between every
     *         replenishment of the Stock.
     */
    public TimeSpan getReviewSpan() {
        return reviewSpan;
    }

    /**
     * The <code>RestockProcessQT</code> replenishes the associated
     * <code>Stock</code> with the given quantity (Q) every period (T).
     *
     * NOTE(review): because the lead time is waited out *before* the review
     * hold, consecutive orders are actually spaced (leadTime + reviewSpan)
     * apart rather than exactly reviewSpan -- confirm this matches the
     * intended periodic-review policy.
     */
    public void lifeCycle() {
        // place order (and tell so in the debug file)
        if (currentlySendTraceNotes()) {
            sendTraceNote("places an order over " + orderQuantity
                    + " units for " + "Stock "
                    + clientStock.getQuotedName());
        }

        // wait the lead time if necessary
        if (leadTime != null) {
            double leadDuration = leadTime.sample().doubleValue();

            // check lead duration non-negative; negative samples (possible
            // e.g. with a Normal distribution) are clamped to zero
            if (leadDuration < 0) {
                sendWarning(
                        "Lead duration distribution sample is negative (" + leadDuration + "). Assuming"
                                + " immediate delivery instead (i.e. duration 0).",
                        "RestockProcessQT : "
                                + getName()
                                + " lifeCycle()",
                        "The given lead time distribution " + leadTime.getName()
                                + " has returned a negative sample.",
                        "Make sure to use a non-negativ lead time distribution."
                                + " Distributions potentially yielding negative values"
                                + " (like Normal distributions) should bet set to non-negative.");
                // set lead duration to 0
                leadDuration = 0;
            }

            hold(new TimeSpan(leadDuration));
        }

        // store the ordered quantity in the Stock
        clientStock.store(orderQuantity);

        // wait until start of the next period
        hold(reviewSpan);
    }

    /**
     * Set the lead time to a new real random number distribution. If set to
     * <code>null</code> the lead time is zero.
     *
     * @param newLeadTime
     *            NumericalDist&lt;Double&gt; : The new random number
     *            distribution determining the lead time.
     */
    public void setLeadTime(desmoj.core.dist.NumericalDist<Double> newLeadTime) {
        leadTime = newLeadTime;
    }

    /**
     * Sets the order quantity to a new value. Zero or negative values will be
     * rejected.
     *
     * @param newOrderQuantity
     *            long : Choose a positive value greater than zero as new order
     *            quantity.
     */
    public void setOrderQuantity(long newOrderQuantity) {
        if (newOrderQuantity <= 0) {
            sendWarning(
                    "The given quantity parameter is negative or zero!"
                            + "The order quantity will remain unchanged!",
                    "RestockProcessQT : "
                            + getName()
                            + " Method: void setOrderQuantity(long newOrderQuantity)",
                    "A negative or zero quantity for an order does not make "
                            + "sense.",
                    "Make sure to provide a valid positive integer number "
                            + "for the quantity of every order.");

            return; // do nothing, just return. ignore that rubbish
        }

        orderQuantity = newOrderQuantity;
    }

    /**
     * Sets the review period to a new value.
     *
     * @param newReviewSpan
     *            TimeSpan : The new value for the review period.
     *            <code>null</code> will be rejected.
     */
    public void setReviewSpan(TimeSpan newReviewSpan) {
        if (newReviewSpan == null) {
            sendWarning(
                    "The given review period parameter is only a null pointer!"
                            + "The review period will remain unchanged!",
                    "RestockProcessQT : "
                            + getName()
                            + " Method: void setReviewSpan(TimeSpan newReviewSpan)",
                    "A null pointer or a time span with zero length does not "
                            + "make sense as a review period.",
                    "Make sure to provide a valid TimeSpan object as parameter "
                            + "for the review period.");

            return; // do nothing, just return. ignore that rubbish
        }

        reviewSpan = newReviewSpan;
    }

    /**
     * Sets the review period to a new value.
     *
     * @param newReviewPeriod
     *            SimTime : The new value for the review period.
     *            <code>null</code> will be rejected.
     *
     * @deprecated Replaced by setReviewSpan(TimeSpan newReviewSpan).
     */
    public void setReviewPeriod(SimTime newReviewPeriod) {
        this.setReviewSpan(SimTime.toTimeSpan(newReviewPeriod));
    }
} | apache-2.0 |
ppavlidis/Gemma | gemma-core/src/main/java/ubic/gemma/persistence/service/expression/bioAssay/BioAssayDaoImpl.java | 8133 | /*
* The Gemma project.
*
* Copyright (c) 2006 University of British Columbia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ubic.gemma.persistence.service.expression.bioAssay;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.Criteria;
import org.hibernate.Hibernate;
import org.hibernate.LockOptions;
import org.hibernate.SessionFactory;
import org.hibernate.jdbc.Work;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import ubic.gemma.model.expression.bioAssay.BioAssay;
import ubic.gemma.model.expression.bioAssay.BioAssayValueObject;
import ubic.gemma.model.expression.bioAssayData.BioAssayDimension;
import ubic.gemma.model.expression.biomaterial.BioMaterial;
import ubic.gemma.persistence.service.AbstractDao;
import ubic.gemma.persistence.service.AbstractVoEnabledDao;
import ubic.gemma.persistence.util.BusinessKey;
import ubic.gemma.persistence.util.EntityUtils;
import java.sql.Connection;
import java.util.Collection;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
/**
* @author pavlidis
*/
@Repository
public class BioAssayDaoImpl extends AbstractVoEnabledDao<BioAssay, BioAssayValueObject> implements BioAssayDao {
@Autowired
public BioAssayDaoImpl( SessionFactory sessionFactory ) {
super( BioAssay.class, sessionFactory );
}
@Override
public Collection<BioAssay> create( final Collection<BioAssay> entities ) {
this.getSessionFactory().getCurrentSession().doWork( new Work() {
@Override
public void execute( Connection connection ) {
for ( BioAssay entity : entities ) {
BioAssayDaoImpl.this.create( entity );
}
}
} );
return entities;
}
@Override
public void update( final Collection<BioAssay> entities ) {
this.getSessionFactory().getCurrentSession().doWork( new Work() {
@Override
public void execute( Connection connection ) {
for ( BioAssay entity : entities ) {
BioAssayDaoImpl.this.update( entity );
}
}
} );
}
@Override
public BioAssay find( BioAssay bioAssay ) {
try {
Criteria queryObject = BusinessKey
.createQueryObject( this.getSessionFactory().getCurrentSession(), bioAssay );
List<?> results = queryObject.list();
Object result = null;
if ( results != null ) {
if ( results.size() > 1 ) {
throw new org.springframework.dao.InvalidDataAccessResourceUsageException(
"More than one instance of '" + BioAssay.class.getName()
+ "' was found when executing query" );
} else if ( results.size() == 1 ) {
result = results.iterator().next();
}
}
return ( BioAssay ) result;
} catch ( org.hibernate.HibernateException ex ) {
throw super.convertHibernateAccessException( ex );
}
}
@Override
public BioAssay findOrCreate( BioAssay bioAssay ) {
BioAssay newBioAssay = this.find( bioAssay );
if ( newBioAssay != null ) {
if ( AbstractDao.log.isDebugEnabled() )
AbstractDao.log.debug( "Found existing bioAssay: " + newBioAssay );
return newBioAssay;
}
if ( AbstractDao.log.isDebugEnabled() )
AbstractDao.log.debug( "Creating new bioAssay: " + bioAssay );
return this.create( bioAssay );
}
@Override
public Collection<BioAssayDimension> findBioAssayDimensions( BioAssay bioAssay ) {
//noinspection unchecked
return this.getSessionFactory().getCurrentSession().createQuery(
"select bad from BioAssayDimension bad inner join bad.bioAssays as ba where :bioAssay in ba " )
.setParameter( "bioAssay", bioAssay ).list();
}
@Override
public Collection<BioAssay> findByAccession( String accession ) {
if ( StringUtils.isBlank( accession ) )
return new HashSet<>();
//noinspection unchecked
return this.getSessionFactory().getCurrentSession().createQuery(
"select distinct b from BioAssay b inner join b.accession a where a.accession = :accession" )
.setParameter( "accession", accession ).list();
}
@Override
public void thaw( final BioAssay bioAssay ) {
try {
this.getSessionFactory().getCurrentSession().doWork( new Work() {
@Override
public void execute( Connection connection ) {
BioAssayDaoImpl.this.getSession().buildLockRequest( LockOptions.NONE ).lock( bioAssay );
Hibernate.initialize( bioAssay.getArrayDesignUsed() );
Hibernate.initialize( bioAssay.getOriginalPlatform() );
BioMaterial bm = bioAssay.getSampleUsed();
BioAssayDaoImpl.this.getSession().buildLockRequest( LockOptions.NONE ).lock( bm );
Hibernate.initialize( bm );
Hibernate.initialize( bm.getBioAssaysUsedIn() );
Hibernate.initialize( bm.getFactorValues() );
BioAssayDaoImpl.this.getSession().evict( bm );
BioAssayDaoImpl.this.getSession().evict( bioAssay );
}
} );
} catch ( Throwable th ) {
throw new RuntimeException(
"Error performing 'BioAssayDao.thawRawAndProcessed(BioAssay bioAssay)' --> " + th, th );
}
}
@Override
public Collection<BioAssay> thaw( Collection<BioAssay> bioAssays ) {
if ( bioAssays.isEmpty() )
return bioAssays;
List<?> thawedBioassays = this.getHibernateTemplate().findByNamedParam(
"select distinct b from BioAssay b left join fetch b.arrayDesignUsed"
+ " left join fetch b.sampleUsed bm"
+ " left join bm.factorValues left join bm.bioAssaysUsedIn where b.id in (:ids) ",
"ids",
EntityUtils.getIds( bioAssays ) );
//noinspection unchecked
return ( Collection<BioAssay> ) thawedBioassays;
}
/**
 * Converts the given bio assays to value objects, selecting which flavour of
 * factor value representation is embedded in the contained bioMaterial VOs.
 *
 * @param entities the bio assays to convert into a VO
 * @param basic true to use FactorValueBasicValueObject, false to use the classic
 *        FactorValueValueObject
 * @return value objects for each input assay, in input iteration order
 */
@Override
//TODO remove when FactorValueValueObject usage is phased out
public Collection<BioAssayValueObject> loadValueObjects( Collection<BioAssay> entities, boolean basic ) {
    Collection<BioAssayValueObject> results = new LinkedHashSet<>();
    for ( BioAssay bioAssay : entities ) {
        results.add( new BioAssayValueObject( bioAssay, basic ) );
    }
    return results;
}
/**
 * Converts a single bio assay to its value object using the classic
 * FactorValueValueObject representation (basic = false).
 *
 * @param entity the bio assay to convert
 * @return the value object
 */
@Override
public BioAssayValueObject loadValueObject( BioAssay entity ) {
    return new BioAssayValueObject( entity, false );
}
/**
 * Converts the given bio assays to value objects using the classic factor
 * value representation, preserving input iteration order.
 *
 * @param entities the bio assays to convert
 * @return value objects for each input assay
 */
@Override
public Collection<BioAssayValueObject> loadValueObjects( Collection<BioAssay> entities ) {
    Collection<BioAssayValueObject> results = new LinkedHashSet<>();
    for ( BioAssay bioAssay : entities ) {
        results.add( this.loadValueObject( bioAssay ) );
    }
    return results;
}
} | apache-2.0 |
arthlimchiu/onleenoot | app/src/main/java/com/example/onleeenoot/addnote/AddNoteActivity.java | 384 | package com.example.onleeenoot.addnote;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import com.example.onleeenoot.R;
/**
 * Screen for creating a new note. Currently it only inflates its layout; no
 * further initialization or event wiring is done here.
 */
public class AddNoteActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_add_note);
    }
}
| apache-2.0 |
gerrit-review/gerrit | java/com/google/gerrit/sshd/SshLog.java | 8866 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.sshd;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.MultimapBuilder;
import com.google.gerrit.common.TimeUtil;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.server.CurrentUser;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.PeerDaemonUser;
import com.google.gerrit.server.audit.AuditService;
import com.google.gerrit.server.audit.SshAuditEvent;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.util.IdGenerator;
import com.google.gerrit.server.util.SystemLog;
import com.google.gerrit.sshd.SshScope.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import org.apache.log4j.AsyncAppender;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.eclipse.jgit.lib.Config;
@Singleton
class SshLog implements LifecycleListener {
  // Logger used both for direct logging and as the category of the synthetic
  // LoggingEvents appended to the sshd request log.
  private static final Logger log = Logger.getLogger(SshLog.class);
  private static final String LOG_NAME = "sshd_log";

  // Property names attached to each LoggingEvent; the SshLogLayout renders them.
  private static final String P_SESSION = "session";
  private static final String P_USER_NAME = "userName";
  private static final String P_ACCOUNT_ID = "accountId";
  private static final String P_WAIT = "queueWaitTime";
  private static final String P_EXEC = "executionTime";
  private static final String P_STATUS = "status";
  private static final String P_AGENT = "agent";

  private final Provider<SshSession> session;
  private final Provider<Context> context;
  // Null when "sshd.requestLog" is disabled; every append site checks for null.
  private final AsyncAppender async;
  private final AuditService auditService;

  @Inject
  SshLog(
      final Provider<SshSession> session,
      final Provider<Context> context,
      SystemLog systemLog,
      @GerritServerConfig Config config,
      AuditService auditService) {
    this.session = session;
    this.context = context;
    this.auditService = auditService;
    // Request logging can be switched off entirely via sshd.requestLog = false.
    if (!config.getBoolean("sshd", "requestLog", true)) {
      async = null;
      return;
    }
    async = systemLog.createAsyncAppender(LOG_NAME, new SshLogLayout());
  }

  @Override
  public void start() {}

  @Override
  public void stop() {
    if (async != null) {
      async.close();
    }
  }

  /** Records a successful login in the request log and the audit trail. */
  void onLogin() {
    LoggingEvent entry = log("LOGIN FROM " + session.get().getRemoteAddressAsString());
    if (async != null) {
      async.append(entry);
    }
    audit(context.get(), "0", "LOGIN");
  }

  /**
   * Records a failed authentication attempt. The event is built directly here
   * (not via {@link #log(String)}) because no authenticated user exists yet,
   * and the audit record is written with a null context for the same reason.
   */
  void onAuthFail(SshSession sd) {
    final LoggingEvent event =
        new LoggingEvent( //
            Logger.class.getName(), // fqnOfCategoryClass
            log, // logger
            TimeUtil.nowMs(), // when
            Level.INFO, // level
            "AUTH FAILURE FROM " + sd.getRemoteAddressAsString(), // message text
            "SSHD", // thread name
            null, // exception information
            null, // current NDC string
            null, // caller location
            null // MDC properties
            );
    event.setProperty(P_SESSION, id(sd.getSessionId()));
    event.setProperty(P_USER_NAME, sd.getUsername());
    final String error = sd.getAuthenticationError();
    if (error != null) {
      event.setProperty(P_STATUS, error);
    }
    if (async != null) {
      async.append(event);
    }
    audit(null, "FAIL", "AUTH");
  }

  /**
   * Records completion of a dispatched command, including queue wait time,
   * execution time and a human-readable status derived from the exit value.
   */
  void onExecute(DispatchCommand dcmd, int exitValue, SshSession sshSession) {
    final Context ctx = context.get();
    ctx.finished = TimeUtil.nowMs();
    String cmd = extractWhat(dcmd);
    final LoggingEvent event = log(cmd);
    event.setProperty(P_WAIT, (ctx.started - ctx.created) + "ms");
    event.setProperty(P_EXEC, (ctx.finished - ctx.started) + "ms");
    // Map the well-known sentinel exit values to symbolic names; anything else
    // is logged as the raw numeric exit status.
    final String status;
    switch (exitValue) {
      case BaseCommand.STATUS_CANCEL:
        status = "killed";
        break;
      case BaseCommand.STATUS_NOT_FOUND:
        status = "not-found";
        break;
      case BaseCommand.STATUS_NOT_ADMIN:
        status = "not-admin";
        break;
      default:
        status = String.valueOf(exitValue);
        break;
    }
    event.setProperty(P_STATUS, status);
    String peerAgent = sshSession.getPeerAgent();
    if (peerAgent != null) {
      event.setProperty(P_AGENT, peerAgent);
    }
    if (async != null) {
      async.append(event);
    }
    audit(context.get(), status, dcmd);
  }

  /**
   * Parses the command's argument array into a multimap for auditing.
   * Positional values are keyed "$0", "$1", ...; options keep their literal
   * "-x"/"--long" names. A "--" token stops option parsing and treats the
   * remainder as positional values.
   */
  private ListMultimap<String, ?> extractParameters(DispatchCommand dcmd) {
    if (dcmd == null) {
      return MultimapBuilder.hashKeys(0).arrayListValues(0).build();
    }
    String[] cmdArgs = dcmd.getArguments();
    String paramName = null;
    int argPos = 0;
    ListMultimap<String, String> parms = MultimapBuilder.hashKeys().arrayListValues().build();
    // NOTE(review): parsing starts at index 2 — presumably the first entries are
    // the command name parts already captured by extractWhat(); confirm before
    // relying on this offset.
    for (int i = 2; i < cmdArgs.length; i++) {
      String arg = cmdArgs[i];
      // -- stop parameters parsing
      if (arg.equals("--")) {
        for (i++; i < cmdArgs.length; i++) {
          parms.put("$" + argPos++, cmdArgs[i]);
        }
        break;
      }
      // --param=value
      int eqPos = arg.indexOf('=');
      if (arg.startsWith("--") && eqPos > 0) {
        parms.put(arg.substring(0, eqPos), arg.substring(eqPos + 1));
        continue;
      }
      // -p value or --param value
      if (arg.startsWith("-")) {
        // A previous flag with no value is recorded as a null-valued entry.
        if (paramName != null) {
          parms.put(paramName, null);
        }
        paramName = arg;
        continue;
      }
      // value
      if (paramName == null) {
        parms.put("$" + argPos++, arg);
      } else {
        parms.put(paramName, arg);
        paramName = null;
      }
    }
    // A trailing flag with no value is also recorded as null-valued.
    if (paramName != null) {
      parms.put(paramName, null);
    }
    return parms;
  }

  /** Records a logout in the request log and the audit trail. */
  void onLogout() {
    LoggingEvent entry = log("LOGOUT");
    if (async != null) {
      async.append(entry);
    }
    audit(context.get(), "0", "LOGOUT");
  }

  /**
   * Builds a LoggingEvent for the current session, stamping the session id and
   * the user identity ("-" placeholders when no identified user is present).
   */
  private LoggingEvent log(String msg) {
    final SshSession sd = session.get();
    final CurrentUser user = sd.getUser();
    final LoggingEvent event =
        new LoggingEvent( //
            Logger.class.getName(), // fqnOfCategoryClass
            log, // logger
            TimeUtil.nowMs(), // when
            Level.INFO, // level
            msg, // message text
            "SSHD", // thread name
            null, // exception information
            null, // current NDC string
            null, // caller location
            null // MDC properties
            );
    event.setProperty(P_SESSION, id(sd.getSessionId()));
    String userName = "-";
    String accountId = "-";
    if (user != null && user.isIdentifiedUser()) {
      IdentifiedUser u = user.asIdentifiedUser();
      userName = u.getAccount().getUserName();
      accountId = "a/" + u.getAccountId().toString();
    } else if (user instanceof PeerDaemonUser) {
      userName = PeerDaemonUser.USER_NAME;
    }
    event.setProperty(P_USER_NAME, userName);
    event.setProperty(P_ACCOUNT_ID, accountId);
    return event;
  }

  /** Formats a numeric session id for logging. */
  private static String id(int id) {
    return IdGenerator.format(id);
  }

  void audit(Context ctx, Object result, String cmd) {
    audit(ctx, result, cmd, null);
  }

  void audit(Context ctx, Object result, DispatchCommand cmd) {
    audit(ctx, result, extractWhat(cmd), extractParameters(cmd));
  }

  /**
   * Dispatches an audit event. When no context is available (e.g. auth
   * failures) the session/user are recorded as unknown and the current time is
   * used as the creation timestamp.
   */
  private void audit(Context ctx, Object result, String cmd, ListMultimap<String, ?> params) {
    String sessionId;
    CurrentUser currentUser;
    long created;
    if (ctx == null) {
      sessionId = null;
      currentUser = null;
      created = TimeUtil.nowMs();
    } else {
      SshSession session = ctx.getSession();
      sessionId = IdGenerator.format(session.getSessionId());
      currentUser = session.getUser();
      created = ctx.created;
    }
    auditService.dispatch(new SshAuditEvent(sessionId, currentUser, cmd, created, params, result));
  }

  /**
   * Derives a dotted command name from a dispatched command, e.g. the command
   * name followed by its arguments from index 1 joined with '.'.
   */
  private String extractWhat(DispatchCommand dcmd) {
    if (dcmd == null) {
      return "Command was already destroyed";
    }
    StringBuilder commandName = new StringBuilder(dcmd.getCommandName());
    String[] args = dcmd.getArguments();
    for (int i = 1; i < args.length; i++) {
      commandName.append(".").append(args[i]);
    }
    return commandName.toString();
  }
}
| apache-2.0 |
sshiting/solo | src/main/java/org/b3log/solo/service/LinkMgmtService.java | 6661 | /*
* Copyright (c) 2010-2015, b3log.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.b3log.solo.service;
import javax.inject.Inject;
import org.b3log.latke.Keys;
import org.b3log.latke.logging.Level;
import org.b3log.latke.logging.Logger;
import org.b3log.latke.repository.Transaction;
import org.b3log.latke.service.ServiceException;
import org.b3log.latke.service.annotation.Service;
import org.b3log.solo.model.Link;
import org.b3log.solo.repository.LinkRepository;
import org.json.JSONObject;
/**
* Link management service.
*
* @author <a href="http://88250.b3log.org">Liang Ding</a>
* @version 1.0.0.1, Nov 2, 2011
* @since 0.4.0
*/
@Service
public class LinkMgmtService {
/**
* Logger.
*/
private static final Logger LOGGER = Logger.getLogger(LinkMgmtService.class.getName());
/**
* Link repository.
*/
@Inject
private LinkRepository linkRepository;
/**
* Removes a link specified by the given link id.
*
* @param linkId the given link id
* @throws ServiceException service exception
*/
public void removeLink(final String linkId)
throws ServiceException {
final Transaction transaction = linkRepository.beginTransaction();
try {
linkRepository.remove(linkId);
transaction.commit();
} catch (final Exception e) {
if (transaction.isActive()) {
transaction.rollback();
}
LOGGER.log(Level.ERROR, "Removes a link[id=" + linkId + "] failed", e);
throw new ServiceException(e);
}
}
/**
* Updates a link by the specified request json object.
*
* @param requestJSONObject the specified request json object, for example,
* <pre>
* {
* "link": {
* "oId": "",
* "linkTitle": "",
* "linkAddress": ""
* }
* }, see {@link Link} for more details
* </pre>
* @throws ServiceException service exception
*/
public void updateLink(final JSONObject requestJSONObject)
throws ServiceException {
final Transaction transaction = linkRepository.beginTransaction();
try {
final JSONObject link = requestJSONObject.getJSONObject(Link.LINK);
final String linkId = link.getString(Keys.OBJECT_ID);
final JSONObject oldLink = linkRepository.get(linkId);
link.put(Link.LINK_ORDER, oldLink.getInt(Link.LINK_ORDER));
linkRepository.update(linkId, link);
transaction.commit();
} catch (final Exception e) {
if (transaction.isActive()) {
transaction.rollback();
}
LOGGER.log(Level.ERROR, e.getMessage(), e);
throw new ServiceException(e);
}
}
/**
* Changes the order of a link specified by the given link id with the
* specified direction.
*
* @param linkId the given link id
* @param direction the specified direction, "up"/"down"
* @throws ServiceException service exception
*/
public void changeOrder(final String linkId, final String direction)
throws ServiceException {
final Transaction transaction = linkRepository.beginTransaction();
try {
final JSONObject srcLink = linkRepository.get(linkId);
final int srcLinkOrder = srcLink.getInt(Link.LINK_ORDER);
JSONObject targetLink = null;
if ("up".equals(direction)) {
targetLink = linkRepository.getUpper(linkId);
} else { // Down
targetLink = linkRepository.getUnder(linkId);
}
if (null == targetLink) {
if (transaction.isActive()) {
transaction.rollback();
}
LOGGER.log(Level.WARN, "Cant not find the target link of source link[order={0}]", srcLinkOrder);
return;
}
// Swaps
srcLink.put(Link.LINK_ORDER, targetLink.getInt(Link.LINK_ORDER));
targetLink.put(Link.LINK_ORDER, srcLinkOrder);
linkRepository.update(srcLink.getString(Keys.OBJECT_ID), srcLink);
linkRepository.update(targetLink.getString(Keys.OBJECT_ID), targetLink);
transaction.commit();
} catch (final Exception e) {
if (transaction.isActive()) {
transaction.rollback();
}
LOGGER.log(Level.ERROR, "Changes link's order failed", e);
throw new ServiceException(e);
}
}
/**
* Adds a link with the specified request json object.
*
* @param requestJSONObject the specified request json object, for example,
* <pre>
* {
* "link": {
* "linkTitle": "",
* "linkAddress": "",
* "linkDescription": "" // optional
* }
* }, see {@link Link} for more details
* </pre>
* @return generated link id
* @throws ServiceException service exception
*/
public String addLink(final JSONObject requestJSONObject)
throws ServiceException {
final Transaction transaction = linkRepository.beginTransaction();
try {
final JSONObject link = requestJSONObject.getJSONObject(Link.LINK);
final int maxOrder = linkRepository.getMaxOrder();
link.put(Link.LINK_ORDER, maxOrder + 1);
final String ret = linkRepository.add(link);
transaction.commit();
return ret;
} catch (final Exception e) {
if (transaction.isActive()) {
transaction.rollback();
}
LOGGER.log(Level.ERROR, "Adds a link failed", e);
throw new ServiceException(e);
}
}
/**
* Sets the link repository with the specified link repository.
*
* @param linkRepository the specified link repository
*/
public void setLinkRepository(final LinkRepository linkRepository) {
this.linkRepository = linkRepository;
}
}
| apache-2.0 |
dizitart/nitrite-database | nitrite/src/main/java/org/dizitart/no2/internals/JoinedDocumentIterable.java | 4146 | /*
*
* Copyright 2017-2018 Nitrite author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.dizitart.no2.internals;
import org.dizitart.no2.*;
import org.dizitart.no2.exceptions.InvalidOperationException;
import org.dizitart.no2.store.NitriteMap;
import org.dizitart.no2.util.Iterables;
import java.util.*;
import static org.dizitart.no2.exceptions.ErrorMessage.REMOVE_ON_DOCUMENT_ITERATOR_NOT_SUPPORTED;
import static org.dizitart.no2.util.EqualsUtils.deepEquals;
/**
* @author Anindya Chatterjee.
*/
class JoinedDocumentIterable implements RecordIterable<Document> {
    // Ids of the local find result; empty TreeSet when the result had no id set.
    private final Collection<NitriteId> resultSet;
    private final NitriteMap<NitriteId, Document> underlyingMap;
    private boolean hasMore;
    private int totalCount;
    // Cursor over the foreign collection; re-iterated for every local document.
    private Cursor foreignCursor;
    private Lookup lookup;

    JoinedDocumentIterable(FindResult findResult, Cursor foreignCursor, Lookup lookup) {
        this.foreignCursor = foreignCursor;
        this.lookup = lookup;
        if (findResult.getIdSet() != null) {
            resultSet = findResult.getIdSet();
        } else {
            resultSet = new TreeSet<>();
        }
        this.underlyingMap = findResult.getUnderlyingMap();
        this.hasMore = findResult.isHasMore();
        this.totalCount = findResult.getTotalCount();
    }

    @Override
    public boolean hasMore() {
        return hasMore;
    }

    @Override
    public int size() {
        return resultSet.size();
    }

    @Override
    public int totalCount() {
        return totalCount;
    }

    @Override
    public Document firstOrDefault() {
        return Iterables.firstOrDefault(this);
    }

    @Override
    public List<Document> toList() {
        return Iterables.toList(this);
    }

    @Override
    public Iterator<Document> iterator() {
        return new JoinedDocumentIterator();
    }

    @Override
    public String toString() {
        return toList().toString();
    }

    /**
     * Iterator that performs the join lazily: each local document is copied
     * and enriched with the matching foreign documents as it is consumed.
     */
    private class JoinedDocumentIterator implements Iterator<Document> {
        private Iterator<NitriteId> iterator;

        JoinedDocumentIterator() {
            iterator = resultSet.iterator();
        }

        @Override
        public boolean hasNext() {
            return iterator.hasNext();
        }

        @Override
        public Document next() {
            NitriteId next = iterator.next();
            Document document = underlyingMap.get(next);
            if (document != null) {
                // Join against a copy so the document cached in the map is not mutated.
                return join(new Document(document), foreignCursor, lookup);
            }
            // Id no longer present in the underlying map.
            return null;
        }

        @Override
        public void remove() {
            throw new InvalidOperationException(REMOVE_ON_DOCUMENT_ITERATOR_NOT_SUPPORTED);
        }

        /**
         * Adds to {@code localDocument} a set of all foreign documents whose
         * foreign-field value deep-equals the local-field value. The target
         * field is only written when at least one match was found.
         */
        private Document join(Document localDocument, Cursor foreignCursor, Lookup lookup) {
            Object localObject = localDocument.get(lookup.getLocalField());
            if (localObject == null) return localDocument;
            Document resultDocument = new Document(localDocument);
            Set<Document> target = new HashSet<>();
            for (Document foreignDocument: foreignCursor) {
                Object foreignObject = foreignDocument.get(lookup.getForeignField());
                if (foreignObject != null) {
                    if (deepEquals(foreignObject, localObject)) {
                        target.add(foreignDocument);
                    }
                }
            }

            if (!target.isEmpty()) {
                resultDocument.put(lookup.getTargetField(), target);
            }
            return resultDocument;
        }
    }
}
| apache-2.0 |
danielmitterdorfer/elasticsearch | plugins/repository-azure/src/test/java/org/elasticsearch/cloud/azure/storage/AzureStorageServiceTests.java | 9243 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cloud.azure.storage;
import com.microsoft.azure.storage.LocationMode;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.net.URI;
import java.net.URISyntaxException;
import static org.elasticsearch.cloud.azure.storage.AzureStorageServiceImpl.blobNameFromUri;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
/**
 * Unit tests for Azure storage client selection and timeout handling.
 * {@code AzureStorageServiceMock} stubs out client creation, so no network
 * access occurs; endpoints are fabricated as "https://&lt;account name&gt;".
 */
public class AzureStorageServiceTests extends ESTestCase {

    // Three configured accounts: azure1 is marked default, azure3 carries a
    // per-account 30s timeout.
    static final Settings settings = Settings.builder()
            .put("cloud.azure.storage.azure1.account", "myaccount1")
            .put("cloud.azure.storage.azure1.key", "mykey1")
            .put("cloud.azure.storage.azure1.default", true)
            .put("cloud.azure.storage.azure2.account", "myaccount2")
            .put("cloud.azure.storage.azure2.key", "mykey2")
            .put("cloud.azure.storage.azure3.account", "myaccount3")
            .put("cloud.azure.storage.azure3.key", "mykey3")
            .put("cloud.azure.storage.azure3.timeout", "30s")
            .build();

    public void testGetSelectedClientWithNoPrimaryAndSecondary() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(Settings.EMPTY);
        azureStorageService.doStart();
        try {
            azureStorageService.getSelectedClient("whatever", LocationMode.PRIMARY_ONLY);
            fail("we should have raised an IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("No primary azure storage can be found. Check your elasticsearch.yml."));
        }
    }

    public void testGetSelectedClientWithNoSecondary() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(Settings.builder()
                .put("cloud.azure.storage.azure1.account", "myaccount1")
                .put("cloud.azure.storage.azure1.key", "mykey1")
                .build());
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure1")));
    }

    // A single configured account acts as the default even without the
    // explicit "default" flag.
    public void testGetDefaultClientWithNoSecondary() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(Settings.builder()
                .put("cloud.azure.storage.azure1.account", "myaccount1")
                .put("cloud.azure.storage.azure1.key", "mykey1")
                .build());
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure1")));
    }

    public void testGetSelectedClientPrimary() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure1")));
    }

    public void testGetSelectedClientSecondary1() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient("azure2", LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure2")));
    }

    public void testGetSelectedClientSecondary2() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure3")));
    }

    public void testGetDefaultClientWithPrimaryAndSecondaries() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure1")));
    }

    public void testGetSelectedClientNonExisting() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        try {
            azureStorageService.getSelectedClient("azure4", LocationMode.PRIMARY_ONLY);
            fail("we should have raised an IllegalArgumentException");
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("Can not find azure account [azure4]. Check your elasticsearch.yml."));
        }
    }

    public void testGetSelectedClientDefault() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client = azureStorageService.getSelectedClient(null, LocationMode.PRIMARY_ONLY);
        assertThat(client.getEndpoint(), is(URI.create("https://azure1")));
    }

    // A global timeout applies to accounts without their own timeout; a
    // per-account timeout (azure3: 30s) takes precedence.
    public void testGetSelectedClientGlobalTimeout() {
        Settings timeoutSettings = Settings.builder()
                .put(settings)
                .put(AzureStorageService.Storage.TIMEOUT_SETTING.getKey(), "10s")
                .build();

        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings);
        azureStorageService.doStart();
        CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
        assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(10 * 1000));
        CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY);
        assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000));
    }

    public void testGetSelectedClientDefaultTimeout() {
        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(settings);
        azureStorageService.doStart();
        CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
        assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), nullValue());
        CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY);
        assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000));
    }

    public void testGetSelectedClientNoTimeout() {
        Settings timeoutSettings = Settings.builder()
                .put("cloud.azure.storage.azure.account", "myaccount")
                .put("cloud.azure.storage.azure.key", "mykey")
                .build();

        AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMock(timeoutSettings);
        azureStorageService.doStart();
        CloudBlobClient client1 = azureStorageService.getSelectedClient("azure", LocationMode.PRIMARY_ONLY);
        assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(nullValue()));
    }

    /**
     * This internal class just overload createClient method which is called by AzureStorageServiceImpl.doStart()
     */
    class AzureStorageServiceMock extends AzureStorageServiceImpl {
        public AzureStorageServiceMock(Settings settings) {
            super(settings);
        }

        // We fake the client here
        @Override
        void createClient(AzureStorageSettings azureStorageSettings) {
            this.clients.put(azureStorageSettings.getAccount(),
                    new CloudBlobClient(URI.create("https://" + azureStorageSettings.getName())));
        }
    }

    // blobNameFromUri should strip scheme, host and container, regardless of
    // scheme (http/https) or whether the host is a name or an IP address.
    public void testBlobNameFromUri() throws URISyntaxException {
        String name = blobNameFromUri(new URI("https://myservice.azure.net/container/path/to/myfile"));
        assertThat(name, is("path/to/myfile"));
        name = blobNameFromUri(new URI("http://myservice.azure.net/container/path/to/myfile"));
        assertThat(name, is("path/to/myfile"));
        name = blobNameFromUri(new URI("http://127.0.0.1/container/path/to/myfile"));
        assertThat(name, is("path/to/myfile"));
        name = blobNameFromUri(new URI("https://127.0.0.1/container/path/to/myfile"));
        assertThat(name, is("path/to/myfile"));
    }
}
| apache-2.0 |
Nextdoor/bender | handlers/src/test/java/com/nextdoor/bender/handler/dynamodb/DynamodbEventDeserializer.java | 2968 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*
* Copyright 2018 Nextdoor.com, Inc
*
*/
package com.nextdoor.bender.handler.dynamodb;
import com.amazonaws.services.dynamodbv2.model.OperationType;
import com.amazonaws.services.dynamodbv2.model.StreamViewType;
import com.amazonaws.services.lambda.runtime.events.DynamodbEvent;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import java.io.IOException;
/**
 * Deserializes DynamoDB stream event JSON into the AWS {@link DynamodbEvent}
 * model for tests. The AWS model classes need two adjustments to bind
 * cleanly with Jackson: duplicate setter overloads must be disambiguated
 * (via mix-ins) and JSON attribute names must be mapped to bean property
 * names (via a custom naming strategy).
 */
public class DynamodbEventDeserializer {
    private static ObjectMapper mapper = new ObjectMapper();

    static {
        // The Record/StreamRecord classes expose both an enum-typed and a
        // String-typed setter for the same property; without the mix-ins
        // Jackson cannot choose between them.
        mapper.addMixIn(com.amazonaws.services.dynamodbv2.model.Record.class, RecordIgnoreDuplicateMethods.class);
        mapper.addMixIn(com.amazonaws.services.dynamodbv2.model.StreamRecord.class, StreamRecordIgnoreDuplicateMethods.class);
        mapper.setPropertyNamingStrategy(new PropertyNamingFix());
    }

    // Prefer the String setter for eventName; ignore the OperationType overload.
    interface RecordIgnoreDuplicateMethods {
        @JsonIgnore
        public void setEventName(OperationType eventName);

        @JsonProperty("eventName")
        public void setEventName(String eventName);
    }

    // Prefer the String setter for StreamViewType; ignore the enum overload.
    interface StreamRecordIgnoreDuplicateMethods {
        @JsonIgnore
        public void setStreamViewType(StreamViewType streamViewType);

        @JsonProperty("StreamViewType")
        public void setStreamViewType(String streamViewType);
    }

    /**
     * Maps bean property names to the JSON attribute names used in DynamoDB
     * stream events: most attributes are PascalCase (default branch
     * upper-cases the first letter), while the explicitly listed cases either
     * keep their lowerCamelCase names or are fully upper-cased type tags
     * (BOOL/SS/NS/BS).
     */
    public static class PropertyNamingFix extends PropertyNamingStrategy.PropertyNamingStrategyBase {
        @Override
        public String translate(String propertyName) {
            switch(propertyName) {
                case "eventID":
                    return "eventID";
                case "eventVersion":
                    return "eventVersion";
                case "eventSource":
                    return "eventSource";
                case "awsRegion":
                    return "awsRegion";
                case "dynamodb":
                    return "dynamodb";
                case "eventSourceARN":
                    return "eventSourceARN";
                case "bool":
                    return "BOOL";
                case "ss":
                    return "SS";
                case "ns":
                    return "NS";
                case "bs":
                    return "BS";
                default:
                    String first = propertyName.substring(0, 1);
                    String rest = propertyName.substring(1);
                    return first.toUpperCase() + rest;
            }
        }
    }

    /**
     * Parses the given JSON string into a {@link DynamodbEvent}.
     *
     * @param json the raw event JSON
     * @return the deserialized event
     * @throws IOException if the JSON cannot be parsed or bound
     */
    public static DynamodbEvent deserialize(String json) throws IOException {
        return mapper.readValue(json, DynamodbEvent.class);
    }
}
| apache-2.0 |
spring-cloud/spring-cloud-zookeeper | spring-cloud-zookeeper-discovery/src/main/java/org/springframework/cloud/zookeeper/discovery/ZookeeperServiceWatch.java | 3182 | /*
* Copyright 2015-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.zookeeper.discovery;
import java.util.concurrent.atomic.AtomicLong;
import jakarta.annotation.PreDestroy;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.recipes.cache.TreeCache;
import org.apache.curator.framework.recipes.cache.TreeCacheEvent;
import org.apache.curator.framework.recipes.cache.TreeCacheListener;
import org.springframework.cloud.client.discovery.event.HeartbeatEvent;
import org.springframework.cloud.client.discovery.event.InstanceRegisteredEvent;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.ApplicationEventPublisherAware;
import org.springframework.context.ApplicationListener;
import org.springframework.util.ReflectionUtils;
/**
* A {@link TreeCacheListener} that sends {@link HeartbeatEvent} when an entry inside
* Zookeeper has changed.
*
* @author Spencer Gibb
* @since 1.0.0
*/
public class ZookeeperServiceWatch
implements ApplicationListener<InstanceRegisteredEvent<?>>, TreeCacheListener,
ApplicationEventPublisherAware {
private final CuratorFramework curator;
private final ZookeeperDiscoveryProperties properties;
private final AtomicLong cacheChange = new AtomicLong(0);
private ApplicationEventPublisher publisher;
private TreeCache cache;
public ZookeeperServiceWatch(CuratorFramework curator,
ZookeeperDiscoveryProperties properties) {
this.curator = curator;
this.properties = properties;
}
@Override
public void setApplicationEventPublisher(ApplicationEventPublisher publisher) {
this.publisher = publisher;
}
public TreeCache getCache() {
return this.cache;
}
@Override
public void onApplicationEvent(InstanceRegisteredEvent<?> event) {
this.cache = TreeCache.newBuilder(this.curator, this.properties.getRoot())
.build();
this.cache.getListenable().addListener(this);
try {
this.cache.start();
}
catch (Exception e) {
ReflectionUtils.rethrowRuntimeException(e);
}
}
@PreDestroy
public void stop() throws Exception {
if (this.cache != null) {
this.cache.close();
}
}
@Override
public void childEvent(CuratorFramework client, TreeCacheEvent event)
throws Exception {
if (event.getType().equals(TreeCacheEvent.Type.NODE_ADDED)
|| event.getType().equals(TreeCacheEvent.Type.NODE_REMOVED)
|| event.getType().equals(TreeCacheEvent.Type.NODE_UPDATED)) {
long newCacheChange = this.cacheChange.incrementAndGet();
this.publisher.publishEvent(new HeartbeatEvent(this, newCacheChange));
}
}
}
| apache-2.0 |
mwjmurphy/Axel-Framework | axel-db/src/main/java/org/xmlactions/db/query/Group.java | 186 | package org.xmlactions.db.query;
public class Group {
private String by;
public void setBy(String by) {
this.by = by;
}
public String getBy() {
return by;
}
}
| apache-2.0 |
jsankey/zutubi-android-ant | src/com/zutubi/android/ant/ParseException.java | 750 |
package com.zutubi.android.ant;
/**
* Exception raised on failure to parse a manifest.
*
* @see Manifest
*/
public class ParseException extends Exception {
private static final long serialVersionUID = 7467884156539661067L;
/**
* Creates a new exception with the given error details.
*
* @param message context about the error that occurred
*/
public ParseException(final String message) {
super(message);
}
/**
* Creates a new exception caused by another exception.
*
* @param message context about the error that occurred
* @param t the cause of this exception
*/
public ParseException(final String message, final Throwable t) {
super(message, t);
}
}
| apache-2.0 |
argv0/cloudstack | awsapi/src/com/amazon/ec2/InstancePlacementType.java | 28508 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
/**
* InstancePlacementType.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: 1.5.1 Built on : Oct 19, 2009 (10:59:34 EDT)
*/
package com.amazon.ec2;
/**
* InstancePlacementType bean class
*/
public class InstancePlacementType
implements org.apache.axis2.databinding.ADBBean{
/* This type was generated from the piece of schema that had
name = InstancePlacementType
Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
Namespace Prefix = ns1
*/
private static java.lang.String generatePrefix(java.lang.String namespace) {
if(namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")){
return "ns1";
}
return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
/**
* field for AvailabilityZone
*/
protected java.lang.String localAvailabilityZone ;
/* This tracker boolean wil be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localAvailabilityZoneTracker = false ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getAvailabilityZone(){
return localAvailabilityZone;
}
/**
* Auto generated setter method
* @param param AvailabilityZone
*/
public void setAvailabilityZone(java.lang.String param){
if (param != null){
//update the setting tracker
localAvailabilityZoneTracker = true;
} else {
localAvailabilityZoneTracker = false;
}
this.localAvailabilityZone=param;
}
/**
* field for GroupName
*/
protected java.lang.String localGroupName ;
/* This tracker boolean wil be used to detect whether the user called the set method
* for this attribute. It will be used to determine whether to include this field
* in the serialized XML
*/
protected boolean localGroupNameTracker = false ;
/**
* Auto generated getter method
* @return java.lang.String
*/
public java.lang.String getGroupName(){
return localGroupName;
}
/**
* Auto generated setter method
* @param param GroupName
*/
public void setGroupName(java.lang.String param){
if (param != null){
//update the setting tracker
localGroupNameTracker = true;
} else {
localGroupNameTracker = false;
}
this.localGroupName=param;
}
/**
* isReaderMTOMAware
* @return true if the reader supports MTOM
*/
public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
boolean isReaderMTOMAware = false;
try{
isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
}catch(java.lang.IllegalArgumentException e){
isReaderMTOMAware = false;
}
return isReaderMTOMAware;
}
/**
*
* @param parentQName
* @param factory
* @return org.apache.axiom.om.OMElement
*/
public org.apache.axiom.om.OMElement getOMElement (
final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory) throws org.apache.axis2.databinding.ADBException{
org.apache.axiom.om.OMDataSource dataSource =
new org.apache.axis2.databinding.ADBDataSource(this,parentQName){
public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
InstancePlacementType.this.serialize(parentQName,factory,xmlWriter);
}
};
return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
parentQName,factory,dataSource);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
serialize(parentQName,factory,xmlWriter,false);
}
public void serialize(final javax.xml.namespace.QName parentQName,
final org.apache.axiom.om.OMFactory factory,
org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
boolean serializeType)
throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException{
java.lang.String prefix = null;
java.lang.String namespace = null;
prefix = parentQName.getPrefix();
namespace = parentQName.getNamespaceURI();
if ((namespace != null) && (namespace.trim().length() > 0)) {
java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
if (writerPrefix != null) {
xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
} else {
if (prefix == null) {
prefix = generatePrefix(namespace);
}
xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
} else {
xmlWriter.writeStartElement(parentQName.getLocalPart());
}
if (serializeType){
java.lang.String namespacePrefix = registerPrefix(xmlWriter,"http://ec2.amazonaws.com/doc/2010-11-15/");
if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)){
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
namespacePrefix+":InstancePlacementType",
xmlWriter);
} else {
writeAttribute("xsi","http://www.w3.org/2001/XMLSchema-instance","type",
"InstancePlacementType",
xmlWriter);
}
}
if (localAvailabilityZoneTracker){
namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"availabilityZone", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"availabilityZone");
}
} else {
xmlWriter.writeStartElement("availabilityZone");
}
if (localAvailabilityZone==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!");
}else{
xmlWriter.writeCharacters(localAvailabilityZone);
}
xmlWriter.writeEndElement();
} if (localGroupNameTracker){
namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
if (! namespace.equals("")) {
prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
xmlWriter.writeStartElement(prefix,"groupName", namespace);
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
} else {
xmlWriter.writeStartElement(namespace,"groupName");
}
} else {
xmlWriter.writeStartElement("groupName");
}
if (localGroupName==null){
// write the nil attribute
throw new org.apache.axis2.databinding.ADBException("groupName cannot be null!!");
}else{
xmlWriter.writeCharacters(localGroupName);
}
xmlWriter.writeEndElement();
}
xmlWriter.writeEndElement();
}
/**
* Util method to write an attribute with the ns prefix
*/
private void writeAttribute(java.lang.String prefix,java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (xmlWriter.getPrefix(namespace) == null) {
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
xmlWriter.writeAttribute(namespace,attName,attValue);
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeAttribute(java.lang.String namespace,java.lang.String attName,
java.lang.String attValue,javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException{
if (namespace.equals(""))
{
xmlWriter.writeAttribute(attName,attValue);
}
else
{
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace,attName,attValue);
}
}
/**
* Util method to write an attribute without the ns prefix
*/
private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String attributeNamespace = qname.getNamespaceURI();
java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
if (attributePrefix == null) {
attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
}
java.lang.String attributeValue;
if (attributePrefix.trim().length() > 0) {
attributeValue = attributePrefix + ":" + qname.getLocalPart();
} else {
attributeValue = qname.getLocalPart();
}
if (namespace.equals("")) {
xmlWriter.writeAttribute(attName, attributeValue);
} else {
registerPrefix(xmlWriter, namespace);
xmlWriter.writeAttribute(namespace, attName, attributeValue);
}
}
/**
* method to handle Qnames
*/
private void writeQName(javax.xml.namespace.QName qname,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
java.lang.String namespaceURI = qname.getNamespaceURI();
if (namespaceURI != null) {
java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
if (prefix == null) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
} else {
// i.e this is the default namespace
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
} else {
xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
}
}
private void writeQNames(javax.xml.namespace.QName[] qnames,
javax.xml.stream.XMLStreamWriter xmlWriter) throws javax.xml.stream.XMLStreamException {
if (qnames != null) {
// we have to store this data until last moment since it is not possible to write any
// namespace data after writing the charactor data
java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
java.lang.String namespaceURI = null;
java.lang.String prefix = null;
for (int i = 0; i < qnames.length; i++) {
if (i > 0) {
stringToWrite.append(" ");
}
namespaceURI = qnames[i].getNamespaceURI();
if (namespaceURI != null) {
prefix = xmlWriter.getPrefix(namespaceURI);
if ((prefix == null) || (prefix.length() == 0)) {
prefix = generatePrefix(namespaceURI);
xmlWriter.writeNamespace(prefix, namespaceURI);
xmlWriter.setPrefix(prefix,namespaceURI);
}
if (prefix.trim().length() > 0){
stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
} else {
stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
}
}
xmlWriter.writeCharacters(stringToWrite.toString());
}
}
/**
* Register a namespace prefix
*/
private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace) throws javax.xml.stream.XMLStreamException {
java.lang.String prefix = xmlWriter.getPrefix(namespace);
if (prefix == null) {
prefix = generatePrefix(namespace);
while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
}
xmlWriter.writeNamespace(prefix, namespace);
xmlWriter.setPrefix(prefix, namespace);
}
return prefix;
}
/**
* databinding method to get an XML representation of this object
*
*/
public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
throws org.apache.axis2.databinding.ADBException{
java.util.ArrayList elementList = new java.util.ArrayList();
java.util.ArrayList attribList = new java.util.ArrayList();
if (localAvailabilityZoneTracker){
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
"availabilityZone"));
if (localAvailabilityZone != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localAvailabilityZone));
} else {
throw new org.apache.axis2.databinding.ADBException("availabilityZone cannot be null!!");
}
} if (localGroupNameTracker){
elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
"groupName"));
if (localGroupName != null){
elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localGroupName));
} else {
throw new org.apache.axis2.databinding.ADBException("groupName cannot be null!!");
}
}
return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
}
/**
* Factory class that keeps the parse method
*/
public static class Factory{
/**
* static method to create the object
* Precondition: If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
* If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
* Postcondition: If this object is an element, the reader is positioned at its end element
* If this object is a complex type, the reader is positioned at the end element of its outer element
*/
public static InstancePlacementType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception{
InstancePlacementType object =
new InstancePlacementType();
int event;
java.lang.String nillableValue = null;
java.lang.String prefix ="";
java.lang.String namespaceuri ="";
try {
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance","type")!=null){
java.lang.String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
"type");
if (fullTypeName!=null){
java.lang.String nsPrefix = null;
if (fullTypeName.indexOf(":") > -1){
nsPrefix = fullTypeName.substring(0,fullTypeName.indexOf(":"));
}
nsPrefix = nsPrefix==null?"":nsPrefix;
java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":")+1);
if (!"InstancePlacementType".equals(type)){
//find namespace for the prefix
java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
return (InstancePlacementType)com.amazon.ec2.ExtensionMapper.getTypeObject(
nsUri,type,reader);
}
}
}
// Note all attributes that were handled. Used to differ normal attributes
// from anyAttributes.
java.util.Vector handledAttributes = new java.util.Vector();
reader.next();
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","availabilityZone").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setAvailabilityZone(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement()) reader.next();
if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/","groupName").equals(reader.getName())){
java.lang.String content = reader.getElementText();
object.setGroupName(
org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));
reader.next();
} // End of if for expected property start element
else {
}
while (!reader.isStartElement() && !reader.isEndElement())
reader.next();
if (reader.isStartElement())
// A start element we are not expecting indicates a trailing invalid property
throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
} catch (javax.xml.stream.XMLStreamException e) {
throw new java.lang.Exception(e);
}
return object;
}
}//end of factory class
}
| apache-2.0 |
hnccfr/ccfrweb | tradecore/src/com/hundsun/network/gates/qingbo/biz/service/pojo/project/ProjectListingServiceImpl.java | 8359 | /* */ package com.hundsun.network.gates.qingbo.biz.service.pojo.project;
/* */
/* */ import com.hundsun.network.gates.luosi.biz.security.ServiceException;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumProjectStatus;
/* */ import com.hundsun.network.gates.luosi.common.remote.ServiceResult;
/* */ import com.hundsun.network.gates.luosi.qingbo.reomte.enums.EnumTradeResultErrors;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.dto.ProjectListingDTO;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumProjectListingResultErrors;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.result.ProjectListingServiceResult;
/* */ import com.hundsun.network.gates.qingbo.biz.dao.project.ProjectListingDAO;
/* */ import com.hundsun.network.gates.qingbo.biz.domain.project.ProjectListing;
/* */ import com.hundsun.network.gates.qingbo.biz.service.BaseService;
/* */ import com.hundsun.network.gates.qingbo.biz.service.project.ProjectListingService;
/* */ import com.hundsun.network.gates.qingbo.biz.util.ConvertUtils;
/* */ import com.hundsun.network.melody.common.util.StringUtil;
/* */ import java.sql.SQLException;
/* */ import java.util.HashMap;
/* */ import java.util.Map;
/* */ import org.apache.commons.logging.Log;
/* */ import org.springframework.beans.factory.annotation.Autowired;
/* */ import org.springframework.stereotype.Service;
/* */ import org.springframework.transaction.TransactionStatus;
/* */ import org.springframework.transaction.support.TransactionCallback;
/* */ import org.springframework.transaction.support.TransactionTemplate;
/* */
/* */ @Service("projectListingService")
/* */ public class ProjectListingServiceImpl extends BaseService
/* */ implements ProjectListingService
/* */ {
/* */
/* */ @Autowired
/* */ private ProjectListingDAO projectListingDAO;
/* */
/* */ public ProjectListingServiceResult addProjectListing(ProjectListing projectListing)
/* */ throws Exception
/* */ {
/* 44 */ ProjectListingServiceResult result = new ProjectListingServiceResult();
/* 45 */ if (null == projectListing) {
/* 46 */ result.setErrorNO(Integer.valueOf(EnumProjectListingResultErrors.PARAMETER_ERROR.getValue()));
/* 47 */ result.setErrorInfo(EnumProjectListingResultErrors.PARAMETER_ERROR.getInfo());
/* 48 */ this.log.error("add projectListing fail, " + result.getErrorInfo());
/* 49 */ throw new ServiceException();
/* */ }
/* 51 */ String projectId = this.projectListingDAO.addProjectListing(projectListing);
/* 52 */ projectListing.setId(Long.valueOf(projectId));
/* 53 */ ProjectListingDTO projectListingDTO = new ProjectListingDTO();
/* 54 */ projectListingDTO = ConvertUtils.convertProjectListing2ProjectListingDTO(projectListing);
/* */
/* 56 */ result.setProjectListingDTO(projectListingDTO);
/* 57 */ return result;
/* */ }
/* */
/* */ private ServiceResult doWithdrawal(Long projectListingId, String operatorAccount)
/* */ {
/* 69 */ final Long proId = projectListingId;
/* 70 */ final String operator = operatorAccount;
/* */
/* 72 */ ServiceResult withdrawalResult = (ServiceResult)this.transactionTemplate.execute(new TransactionCallback()
/* */ {
/* */ public ServiceResult doInTransaction(TransactionStatus status) {
/* 75 */ ServiceResult result = new ServiceResult();
/* */
/* 79 */ ProjectListing projectListing = ProjectListingServiceImpl.this.projectListingDAO.getProSimpInfo(proId);
/* */
/* 81 */ projectListing.setStatus(EnumProjectStatus.WITHDRAWAL.getValue());
/* 82 */ projectListing.setOperator(operator);
/* 83 */ ProjectListingServiceImpl.this.projectListingDAO.updateStatusById(projectListing);
/* 84 */ return result;
/* */ }
/* */ });
/* 87 */ return withdrawalResult;
/* */ }
/* */
/* */ public ProjectListingServiceResult tradeClearProject(ProjectListing projectListing)
/* */ {
/* 97 */ ProjectListingServiceResult result = new ProjectListingServiceResult();
/* 98 */ ProjectListing projObjOld = this.projectListingDAO.selectByProjectCode(projectListing.getCode());
/* 99 */ Long remain = Long.valueOf(projObjOld.getQuantity().longValue() - projectListing.getQuantity().longValue());
/* 100 */ Long quantity = projectListing.getQuantity();
/* 101 */ projectListing.setId(null);
/* 102 */ projectListing.setTitle(null);
/* 103 */ projectListing.setQuantity(remain);
/* 104 */ if (remain.longValue() > 0L) {
/* 105 */ projectListing.setUserAccount(null);
/* */
/* 107 */ Map paramMap = new HashMap();
/* 108 */ paramMap.put("code", projectListing.getCode());
/* 109 */ paramMap.put("quantity", remain);
/* 110 */ paramMap.put("quantityWhere", quantity);
/* 111 */ paramMap.put("statusWhere", projObjOld.getStatus());
/* 112 */ if (this.projectListingDAO.updateProjectListing(paramMap) <= 0) {
/* 113 */ result.setErrorInfo(EnumTradeResultErrors.ORDER_QUANTITY_ERROR.getInfo());
/* 114 */ result.setErrorNO(Integer.valueOf(EnumTradeResultErrors.ORDER_QUANTITY_ERROR.getValue()));
/* 115 */ return result;
/* */ }
/* 117 */ paramMap.clear();
/* 118 */ projObjOld.setQuantity(remain);
/* 119 */ ProjectListingDTO projectListingDTO = new ProjectListingDTO();
/* */ try {
/* 121 */ projectListingDTO = ConvertUtils.convertProjectListing2ProjectListingDTO(projObjOld);
/* */ }
/* */ catch (Exception e) {
/* 124 */ this.log.error("wulin projetlisting对象转换 projectlistingDTO 出错", e);
/* 125 */ result.setErrorInfo("wulin projetlisting对象转换 projectlistingDTO 出错");
/* */ }
/* 127 */ result.setProjectListingDTO(projectListingDTO);
/* */ } else {
/* 129 */ Map paramMap = new HashMap();
/* 130 */ paramMap.put("code", projectListing.getCode());
/* 131 */ paramMap.put("quantity", Long.valueOf(0L));
/* 132 */ paramMap.put("status", EnumProjectStatus.OVER.getValue());
/* 133 */ paramMap.put("quantityWhere", quantity);
/* 134 */ paramMap.put("statusWhere", projObjOld.getStatus());
/* 135 */ if (this.projectListingDAO.updateProjectListing(paramMap) <= 0) {
/* 136 */ result.setErrorInfo(EnumTradeResultErrors.ORDER_QUANTITY_ERROR.getInfo());
/* 137 */ result.setErrorNO(Integer.valueOf(EnumTradeResultErrors.ORDER_QUANTITY_ERROR.getValue()));
/* 138 */ return result;
/* */ }
/* 140 */ paramMap.clear();
/* */
/* 142 */ projObjOld.setQuantity(Long.valueOf(0L));
/* 143 */ projObjOld.setStatus(EnumProjectStatus.OVER.getValue());
/* 144 */ ProjectListingDTO projectListingDTO = new ProjectListingDTO();
/* */ try {
/* 146 */ projectListingDTO = ConvertUtils.convertProjectListing2ProjectListingDTO(projObjOld);
/* */ }
/* */ catch (Exception e)
/* */ {
/* 150 */ this.log.error("wulin projetlisting对象转换 projectlistingDTO 出错", e);
/* 151 */ result.setErrorInfo("wulin projetlisting对象转换 projectlistingDTO 出错");
/* */ }
/* */
/* 155 */ result.setProjectListingDTO(projectListingDTO);
/* */ }
/* */
/* 158 */ return result;
/* */ }
/* */
/* */ public ProjectListing getProjectListingByCode(String projectCode)
/* */ throws SQLException
/* */ {
/* 169 */ if ((null == projectCode) || (StringUtil.isEmpty(projectCode))) {
/* 170 */ return null;
/* */ }
/* 172 */ return this.projectListingDAO.selectByProjectCode(projectCode);
/* */ }
/* */ }
/* Location: E:\__安装归档\linquan-20161112\deploy16\qingbo\webroot\WEB-INF\classes\
* Qualified Name: com.hundsun.network.gates.qingbo.biz.service.pojo.project.ProjectListingServiceImpl
* JD-Core Version: 0.6.0
*/ | apache-2.0 |
Android-W/android-samples | app/src/main/java/androidsamples/androidw/com/androidsamples/view/flicker/FlickerActivity.java | 1449 | package androidsamples.androidw.com.androidsamples.view.flicker;
import android.os.Bundle;
import android.support.annotation.Nullable;
import androidsamples.androidw.com.androidsamples.R;
import androidsamples.androidw.com.androidsamples.base.view.BaseActivity;
import androidsamples.androidw.com.androidsamples.view.flicker.presenter.FlickerPresenter;
import androidsamples.androidw.com.androidsamples.network.RetrofitPhoto;
import androidsamples.androidw.com.androidsamples.util.ActivityUtil;
/**
* Created by Tae-hwan on 6/2/16.
*/
public class FlickerActivity extends BaseActivity {
private FlickerPresenter flickerPresenter;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_flicker);
FlickerFragment flickerFragment = (FlickerFragment) getSupportFragmentManager().findFragmentById(R.id.contentFrame);
if (flickerFragment == null) {
flickerFragment = FlickerFragment.newInstance();
ActivityUtil.addFragmentToActivity(getSupportFragmentManager(), flickerFragment, R.id.contentFrame);
}
flickerPresenter = new FlickerPresenter(flickerFragment, RetrofitPhoto.getRetrofitPhoto());
}
@Override
protected void onDestroy() {
super.onDestroy();
if (flickerPresenter != null) {
flickerPresenter.onDestroy();
}
}
}
| apache-2.0 |
usc/demo | src/main/java/org/usc/demo/guava/LockTest1.java | 562 | package org.usc.demo.guava;
import com.google.common.collect.Lists;
import java.util.concurrent.CopyOnWriteArrayList;
/**
*
* @author Shunli
*/
public class LockTest1 {
public static void main(String[] args) {
CopyOnWriteArrayList<Object> newCopyOnWriteArrayList = Lists.newCopyOnWriteArrayList();
System.out.println(newCopyOnWriteArrayList.addIfAbsent("1212"));
System.out.println(newCopyOnWriteArrayList.addIfAbsent("1212"));
System.out.println(newCopyOnWriteArrayList.remove("1212"));
}
}
| apache-2.0 |
sendy943/jfinal-wxmall | wxmall-model/src/main/java/com/dbumama/market/model/base/BaseExpressTemplate.java | 2705 | package com.dbumama.market.model.base;
import com.jfinal.plugin.activerecord.Model;
import com.jfinal.plugin.activerecord.IBean;
/**
* Generated by JFinal, do not modify this file.
*/
@SuppressWarnings("serial")
public abstract class BaseExpressTemplate<M extends BaseExpressTemplate<M>> extends Model<M> implements IBean {
public void setId(java.lang.Long id) {
set("id", id);
}
public java.lang.Long getId() {
return get("id");
}
public void setSellerId(java.lang.Long sellerId) {
set("seller_id", sellerId);
}
public java.lang.Long getSellerId() {
return get("seller_id");
}
public void setExpName(java.lang.String expName) {
set("exp_name", expName);
}
public java.lang.String getExpName() {
return get("exp_name");
}
public void setExpKey(java.lang.String expKey) {
set("exp_key", expKey);
}
public java.lang.String getExpKey() {
return get("exp_key");
}
public void setExpTplcontent(java.lang.String expTplcontent) {
set("exp_tplcontent", expTplcontent);
}
public java.lang.String getExpTplcontent() {
return get("exp_tplcontent");
}
public void setExpDesignhtml(java.lang.String expDesignhtml) {
set("exp_designhtml", expDesignhtml);
}
public java.lang.String getExpDesignhtml() {
return get("exp_designhtml");
}
public void setExpBgimg(java.lang.String expBgimg) {
set("exp_bgimg", expBgimg);
}
public java.lang.String getExpBgimg() {
return get("exp_bgimg");
}
public void setIsDefault(java.lang.Integer isDefault) {
set("is_default", isDefault);
}
public java.lang.Integer getIsDefault() {
return get("is_default");
}
public void setPagewidth(java.lang.Integer pagewidth) {
set("pagewidth", pagewidth);
}
public java.lang.Integer getPagewidth() {
return get("pagewidth");
}
public void setPageheight(java.lang.Integer pageheight) {
set("pageheight", pageheight);
}
public java.lang.Integer getPageheight() {
return get("pageheight");
}
public void setOffsetx(java.lang.Integer offsetx) {
set("offsetx", offsetx);
}
public java.lang.Integer getOffsetx() {
return get("offsetx");
}
public void setOffsety(java.lang.Integer offsety) {
set("offsety", offsety);
}
public java.lang.Integer getOffsety() {
return get("offsety");
}
public void setActive(java.lang.Integer active) {
set("active", active);
}
public java.lang.Integer getActive() {
return get("active");
}
public void setCreated(java.util.Date created) {
set("created", created);
}
public java.util.Date getCreated() {
return get("created");
}
public void setUpdated(java.util.Date updated) {
set("updated", updated);
}
public java.util.Date getUpdated() {
return get("updated");
}
}
| apache-2.0 |
DavidHerzogTU-Berlin/cassandraToRun | src/java/org/apache/cassandra/tools/NodeProbe.java | 32303 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.RuntimeMXBean;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.Condition;
import javax.management.*;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;
import com.google.common.collect.Iterables;
import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutorMBean;
import org.apache.cassandra.db.ColumnFamilyStoreMBean;
import org.apache.cassandra.db.HintedHandOffManager;
import org.apache.cassandra.db.HintedHandOffManagerMBean;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.CompactionManagerMBean;
import org.apache.cassandra.gms.FailureDetector;
import org.apache.cassandra.gms.FailureDetectorMBean;
import org.apache.cassandra.locator.EndpointSnitchInfoMBean;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.net.MessagingServiceMBean;
import org.apache.cassandra.service.*;
import org.apache.cassandra.streaming.StreamState;
import org.apache.cassandra.streaming.StreamManagerMBean;
import org.apache.cassandra.utils.SimpleCondition;
/**
 * JMX client operations for Cassandra.
 *
 * Wraps a remote JMX connection to a single Cassandra node and exposes typed
 * proxies for the node's management beans (storage service, compaction
 * manager, caches, messaging, hinted handoff, ...). Most public methods are
 * thin delegations to those proxies. Instances are not thread-safe.
 */
public class NodeProbe
{
    private static final String fmtUrl = "service:jmx:rmi:///jndi/rmi://%s:%d/jmxrmi";
    private static final String ssObjName = "org.apache.cassandra.db:type=StorageService";
    private static final int defaultPort = 7199;
    final String host;
    final int port;
    private String username;
    private String password;
    private JMXConnector jmxc;
    private MBeanServerConnection mbeanServerConn;
    private CompactionManagerMBean compactionProxy;
    private StorageServiceMBean ssProxy;
    private MemoryMXBean memProxy;
    private RuntimeMXBean runtimeProxy;
    private StreamManagerMBean streamProxy;
    public MessagingServiceMBean msProxy;
    private FailureDetectorMBean fdProxy;
    private CacheServiceMBean cacheService;
    private StorageProxyMBean spProxy;
    private HintedHandOffManagerMBean hhProxy;
    // Latched to true when an async repair triggered through this probe
    // reports failure; never reset for the lifetime of the probe.
    private boolean failed;
    /**
     * Creates a NodeProbe using the specified JMX host, port, username, and password.
     *
     * @param host hostname or IP address of the JMX agent
     * @param port TCP port of the remote JMX agent
     * @throws IOException on connection failures
     */
    public NodeProbe(String host, int port, String username, String password) throws IOException
    {
        // NOTE: this validation only runs when assertions are enabled (-ea);
        // with assertions disabled blank credentials fall through to connect().
        assert username != null && !username.isEmpty() && password != null && !password.isEmpty()
               : "neither username nor password can be blank";
        this.host = host;
        this.port = port;
        this.username = username;
        this.password = password;
        connect();
    }
    /**
     * Creates a NodeProbe using the specified JMX host and port.
     *
     * @param host hostname or IP address of the JMX agent
     * @param port TCP port of the remote JMX agent
     * @throws IOException on connection failures
     */
    public NodeProbe(String host, int port) throws IOException
    {
        this.host = host;
        this.port = port;
        connect();
    }
    /**
     * Creates a NodeProbe using the specified JMX host and default port.
     *
     * @param host hostname or IP address of the JMX agent
     * @throws IOException on connection failures
     */
    public NodeProbe(String host) throws IOException
    {
        this.host = host;
        this.port = defaultPort;
        connect();
    }
    /**
     * Create a connection to the JMX agent and setup the M[X]Bean proxies.
     *
     * The connector is closed again if any proxy lookup fails part way, so a
     * failed probe does not leak the underlying RMI connection.
     *
     * @throws IOException on connection failures
     */
    private void connect() throws IOException
    {
        JMXServiceURL jmxUrl = new JMXServiceURL(String.format(fmtUrl, host, port));
        Map<String,Object> env = new HashMap<String,Object>();
        if (username != null)
        {
            String[] creds = { username, password };
            env.put(JMXConnector.CREDENTIALS, creds);
        }
        jmxc = JMXConnectorFactory.connect(jmxUrl, env);
        boolean setupComplete = false;
        try
        {
            mbeanServerConn = jmxc.getMBeanServerConnection();
            ObjectName name = new ObjectName(ssObjName);
            ssProxy = JMX.newMBeanProxy(mbeanServerConn, name, StorageServiceMBean.class);
            name = new ObjectName(MessagingService.MBEAN_NAME);
            msProxy = JMX.newMBeanProxy(mbeanServerConn, name, MessagingServiceMBean.class);
            name = new ObjectName(StreamManagerMBean.OBJECT_NAME);
            streamProxy = JMX.newMBeanProxy(mbeanServerConn, name, StreamManagerMBean.class);
            name = new ObjectName(CompactionManager.MBEAN_OBJECT_NAME);
            compactionProxy = JMX.newMBeanProxy(mbeanServerConn, name, CompactionManagerMBean.class);
            name = new ObjectName(FailureDetector.MBEAN_NAME);
            fdProxy = JMX.newMBeanProxy(mbeanServerConn, name, FailureDetectorMBean.class);
            name = new ObjectName(CacheService.MBEAN_NAME);
            cacheService = JMX.newMBeanProxy(mbeanServerConn, name, CacheServiceMBean.class);
            name = new ObjectName(StorageProxy.MBEAN_NAME);
            spProxy = JMX.newMBeanProxy(mbeanServerConn, name, StorageProxyMBean.class);
            name = new ObjectName(HintedHandOffManager.MBEAN_NAME);
            hhProxy = JMX.newMBeanProxy(mbeanServerConn, name, HintedHandOffManagerMBean.class);
            memProxy = ManagementFactory.newPlatformMXBeanProxy(mbeanServerConn,
                    ManagementFactory.MEMORY_MXBEAN_NAME, MemoryMXBean.class);
            runtimeProxy = ManagementFactory.newPlatformMXBeanProxy(
                    mbeanServerConn, ManagementFactory.RUNTIME_MXBEAN_NAME, RuntimeMXBean.class);
            setupComplete = true;
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException(
                    "Invalid ObjectName? Please report this as a bug.", e);
        }
        finally
        {
            // Avoid leaking the connector if proxy setup threw after connect().
            if (!setupComplete)
            {
                try
                {
                    jmxc.close();
                }
                catch (IOException ignored)
                {
                    // best effort: the original failure is already propagating
                }
            }
        }
    }
    public void close() throws IOException
    {
        jmxc.close();
    }
    // ---- keyspace/column-family maintenance, delegated to StorageServiceMBean ----
    public void forceKeyspaceCleanup(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
    {
        ssProxy.forceKeyspaceCleanup(keyspaceName, columnFamilies);
    }
    public void scrub(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
    {
        ssProxy.scrub(keyspaceName, columnFamilies);
    }
    public void upgradeSSTables(String keyspaceName, boolean excludeCurrentVersion, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
    {
        ssProxy.upgradeSSTables(keyspaceName, excludeCurrentVersion, columnFamilies);
    }
    public void forceKeyspaceCompaction(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
    {
        ssProxy.forceKeyspaceCompaction(keyspaceName, columnFamilies);
    }
    public void forceKeyspaceFlush(String keyspaceName, String... columnFamilies) throws IOException, ExecutionException, InterruptedException
    {
        ssProxy.forceKeyspaceFlush(keyspaceName, columnFamilies);
    }
    public void forceKeyspaceRepair(String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException
    {
        ssProxy.forceKeyspaceRepair(keyspaceName, isSequential, isLocal, columnFamilies);
    }
    /**
     * Kicks off an async repair and blocks (via RepairRunner) until the repair
     * session finishes, echoing progress notifications to {@code out}.
     * A failed session latches {@link #isFailed()}.
     */
    public void forceRepairAsync(final PrintStream out, final String keyspaceName, boolean isSequential, boolean isLocal, boolean primaryRange, String... columnFamilies) throws IOException
    {
        RepairRunner runner = new RepairRunner(out, keyspaceName, columnFamilies);
        try
        {
            ssProxy.addNotificationListener(runner, null, null);
            if (!runner.repairAndWait(ssProxy, isSequential, isLocal, primaryRange))
                failed = true;
        }
        catch (Exception e)
        {
            throw new IOException(e) ;
        }
        finally
        {
            try
            {
                ssProxy.removeNotificationListener(runner);
            }
            catch (ListenerNotFoundException ignored) {}
        }
    }
    /**
     * Same as {@link #forceRepairAsync} but restricted to a token range.
     */
    public void forceRepairRangeAsync(final PrintStream out, final String keyspaceName, boolean isSequential, boolean isLocal, final String startToken, final String endToken, String... columnFamilies) throws IOException
    {
        RepairRunner runner = new RepairRunner(out, keyspaceName, columnFamilies);
        try
        {
            ssProxy.addNotificationListener(runner, null, null);
            if (!runner.repairRangeAndWait(ssProxy, isSequential, isLocal, startToken, endToken))
                failed = true;
        }
        catch (Exception e)
        {
            throw new IOException(e) ;
        }
        finally
        {
            try
            {
                ssProxy.removeNotificationListener(runner);
            }
            catch (ListenerNotFoundException ignored) {}
        }
    }
    public void forceKeyspaceRepairPrimaryRange(String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException
    {
        ssProxy.forceKeyspaceRepairPrimaryRange(keyspaceName, isSequential, isLocal, columnFamilies);
    }
    public void forceKeyspaceRepairRange(String beginToken, String endToken, String keyspaceName, boolean isSequential, boolean isLocal, String... columnFamilies) throws IOException
    {
        ssProxy.forceKeyspaceRepairRange(beginToken, endToken, keyspaceName, isSequential, isLocal, columnFamilies);
    }
    public void invalidateKeyCache()
    {
        cacheService.invalidateKeyCache();
    }
    public void invalidateRowCache()
    {
        cacheService.invalidateRowCache();
    }
    public void drain() throws IOException, InterruptedException, ExecutionException
    {
        ssProxy.drain();
    }
    // ---- ring / topology queries ----
    public Map<String, String> getTokenToEndpointMap()
    {
        return ssProxy.getTokenToEndpointMap();
    }
    public List<String> getLiveNodes()
    {
        return ssProxy.getLiveNodes();
    }
    public List<String> getJoiningNodes()
    {
        return ssProxy.getJoiningNodes();
    }
    public List<String> getLeavingNodes()
    {
        return ssProxy.getLeavingNodes();
    }
    public List<String> getMovingNodes()
    {
        return ssProxy.getMovingNodes();
    }
    public List<String> getUnreachableNodes()
    {
        return ssProxy.getUnreachableNodes();
    }
    public Map<String, String> getLoadMap()
    {
        return ssProxy.getLoadMap();
    }
    public Map<InetAddress, Float> getOwnership()
    {
        return ssProxy.getOwnership();
    }
    public Map<InetAddress, Float> effectiveOwnership(String keyspace) throws IllegalStateException
    {
        return ssProxy.effectiveOwnership(keyspace);
    }
    public CacheServiceMBean getCacheServiceMBean()
    {
        String cachePath = "org.apache.cassandra.db:type=Caches";
        try
        {
            return JMX.newMBeanProxy(mbeanServerConn, new ObjectName(cachePath), CacheServiceMBean.class);
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException(e);
        }
    }
    public Iterator<Map.Entry<String, ColumnFamilyStoreMBean>> getColumnFamilyStoreMBeanProxies()
    {
        try
        {
            return new ColumnFamilyStoreMBeanIterator(mbeanServerConn);
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException("Invalid ObjectName? Please report this as a bug.", e);
        }
        catch (IOException e)
        {
            throw new RuntimeException("Could not retrieve list of stat mbeans.", e);
        }
    }
    public CompactionManagerMBean getCompactionManagerProxy()
    {
        return compactionProxy;
    }
    public List<String> getTokens()
    {
        return ssProxy.getTokens();
    }
    public List<String> getTokens(String endpoint)
    {
        try
        {
            return ssProxy.getTokens(endpoint);
        }
        catch (UnknownHostException e)
        {
            throw new RuntimeException(e);
        }
    }
    public String getLocalHostId()
    {
        return ssProxy.getLocalHostId();
    }
    public Map<String, String> getHostIdMap()
    {
        return ssProxy.getHostIdMap();
    }
    public String getLoadString()
    {
        return ssProxy.getLoadString();
    }
    public String getReleaseVersion()
    {
        return ssProxy.getReleaseVersion();
    }
    public int getCurrentGenerationNumber()
    {
        return ssProxy.getCurrentGenerationNumber();
    }
    public long getUptime()
    {
        return runtimeProxy.getUptime();
    }
    public MemoryUsage getHeapMemoryUsage()
    {
        return memProxy.getHeapMemoryUsage();
    }
    /**
     * Take a snapshot of all the keyspaces, optionally specifying only a specific column family.
     *
     * @param snapshotName the name of the snapshot.
     * @param columnFamily the column family to snapshot or all on null
     * @param keyspaces the keyspaces to snapshot
     */
    public void takeSnapshot(String snapshotName, String columnFamily, String... keyspaces) throws IOException
    {
        if (columnFamily != null)
        {
            if (keyspaces.length != 1)
            {
                throw new IOException("When specifying the column family for a snapshot, you must specify one and only one keyspace");
            }
            ssProxy.takeColumnFamilySnapshot(keyspaces[0], columnFamily, snapshotName);
        }
        else
            ssProxy.takeSnapshot(snapshotName, keyspaces);
    }
    /**
     * Remove all the existing snapshots.
     */
    public void clearSnapshot(String tag, String... keyspaces) throws IOException
    {
        ssProxy.clearSnapshot(tag, keyspaces);
    }
    public boolean isJoined()
    {
        return ssProxy.isJoined();
    }
    public void joinRing() throws IOException
    {
        ssProxy.joinRing();
    }
    public void decommission() throws InterruptedException
    {
        ssProxy.decommission();
    }
    public void move(String newToken) throws IOException
    {
        ssProxy.move(newToken);
    }
    public void removeNode(String token)
    {
        ssProxy.removeNode(token);
    }
    public String getRemovalStatus()
    {
        return ssProxy.getRemovalStatus();
    }
    public void forceRemoveCompletion()
    {
        ssProxy.forceRemoveCompletion();
    }
    public Iterator<Map.Entry<String, JMXEnabledThreadPoolExecutorMBean>> getThreadPoolMBeanProxies()
    {
        try
        {
            return new ThreadPoolProxyMBeanIterator(mbeanServerConn);
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException("Invalid ObjectName? Please report this as a bug.", e);
        }
        catch (IOException e)
        {
            throw new RuntimeException("Could not retrieve list of stat mbeans.", e);
        }
    }
    /**
     * Set the compaction threshold
     *
     * @param minimumCompactionThreshold minimum compaction threshold
     * @param maximumCompactionThreshold maximum compaction threshold
     */
    public void setCompactionThreshold(String ks, String cf, int minimumCompactionThreshold, int maximumCompactionThreshold)
    {
        ColumnFamilyStoreMBean cfsProxy = getCfsProxy(ks, cf);
        cfsProxy.setCompactionThresholds(minimumCompactionThreshold, maximumCompactionThreshold);
    }
    public void disableAutoCompaction(String ks, String ... columnFamilies) throws IOException
    {
        ssProxy.disableAutoCompaction(ks, columnFamilies);
    }
    public void enableAutoCompaction(String ks, String ... columnFamilies) throws IOException
    {
        ssProxy.enableAutoCompaction(ks, columnFamilies);
    }
    public void setIncrementalBackupsEnabled(boolean enabled)
    {
        ssProxy.setIncrementalBackupsEnabled(enabled);
    }
    public void setCacheCapacities(int keyCacheCapacity, int rowCacheCapacity)
    {
        try
        {
            String keyCachePath = "org.apache.cassandra.db:type=Caches";
            CacheServiceMBean cacheMBean = JMX.newMBeanProxy(mbeanServerConn, new ObjectName(keyCachePath), CacheServiceMBean.class);
            cacheMBean.setKeyCacheCapacityInMB(keyCacheCapacity);
            cacheMBean.setRowCacheCapacityInMB(rowCacheCapacity);
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException(e);
        }
    }
    public List<InetAddress> getEndpoints(String keyspace, String cf, String key)
    {
        return ssProxy.getNaturalEndpoints(keyspace, cf, key);
    }
    public List<String> getSSTables(String keyspace, String cf, String key)
    {
        ColumnFamilyStoreMBean cfsProxy = getCfsProxy(keyspace, cf);
        return cfsProxy.getSSTablesForKey(key);
    }
    public Set<StreamState> getStreamStatus()
    {
        return streamProxy.getCurrentStreams();
    }
    public String getOperationMode()
    {
        return ssProxy.getOperationMode();
    }
    public void truncate(String keyspaceName, String cfName)
    {
        try
        {
            ssProxy.truncate(keyspaceName, cfName);
        }
        catch (TimeoutException e)
        {
            throw new RuntimeException("Error while executing truncate", e);
        }
        catch (IOException e)
        {
            throw new RuntimeException("Error while executing truncate", e);
        }
    }
    public EndpointSnitchInfoMBean getEndpointSnitchInfoProxy()
    {
        try
        {
            return JMX.newMBeanProxy(mbeanServerConn, new ObjectName("org.apache.cassandra.db:type=EndpointSnitchInfo"), EndpointSnitchInfoMBean.class);
        }
        catch (MalformedObjectNameException e)
        {
            throw new RuntimeException(e);
        }
    }
    /**
     * Looks up the ColumnFamilyStore MBean proxy for a keyspace/CF pair.
     * A dotted cf name (e.g. "cf.index") is treated as a secondary index.
     * NOTE: exits the JVM (System.exit) if the bean cannot be found - this
     * class is a CLI helper, not a library entry point.
     */
    public ColumnFamilyStoreMBean getCfsProxy(String ks, String cf)
    {
        ColumnFamilyStoreMBean cfsProxy = null;
        try
        {
            String type = cf.contains(".") ? "IndexColumnFamilies" : "ColumnFamilies";
            Set<ObjectName> beans = mbeanServerConn.queryNames(
                    new ObjectName("org.apache.cassandra.db:type=*" + type +",keyspace=" + ks + ",columnfamily=" + cf), null);
            if (beans.isEmpty())
                throw new MalformedObjectNameException("couldn't find that bean");
            assert beans.size() == 1;
            for (ObjectName bean : beans)
                cfsProxy = JMX.newMBeanProxy(mbeanServerConn, bean, ColumnFamilyStoreMBean.class);
        }
        catch (MalformedObjectNameException mone)
        {
            System.err.println("ColumnFamilyStore for " + ks + "/" + cf + " not found.");
            System.exit(1);
        }
        catch (IOException e)
        {
            System.err.println("ColumnFamilyStore for " + ks + "/" + cf + " not found: " + e);
            System.exit(1);
        }
        return cfsProxy;
    }
    public StorageProxyMBean getSpProxy()
    {
        return spProxy;
    }
    public String getEndpoint()
    {
        // Try to find the endpoint using the local token, doing so in a crazy manner
        // to maintain backwards compatibility with the MBean interface
        String stringToken = ssProxy.getTokens().get(0);
        Map<String, String> tokenToEndpoint = ssProxy.getTokenToEndpointMap();
        for (Map.Entry<String, String> pair : tokenToEndpoint.entrySet())
        {
            if (pair.getKey().equals(stringToken))
            {
                return pair.getValue();
            }
        }
        throw new RuntimeException("Could not find myself in the endpoint list, something is very wrong! Is the Cassandra node fully started?");
    }
    public String getDataCenter()
    {
        try
        {
            return getEndpointSnitchInfoProxy().getDatacenter(getEndpoint());
        }
        catch (UnknownHostException e)
        {
            return "Unknown";
        }
    }
    public String getRack()
    {
        try
        {
            return getEndpointSnitchInfoProxy().getRack(getEndpoint());
        }
        catch (UnknownHostException e)
        {
            return "Unknown";
        }
    }
    public List<String> getKeyspaces()
    {
        return ssProxy.getKeyspaces();
    }
    public void disableHintedHandoff()
    {
        spProxy.setHintedHandoffEnabled(false);
    }
    public void enableHintedHandoff()
    {
        spProxy.setHintedHandoffEnabled(true);
    }
    public void pauseHintsDelivery()
    {
        hhProxy.pauseHintsDelivery(true);
    }
    public void resumeHintsDelivery()
    {
        hhProxy.pauseHintsDelivery(false);
    }
    public void stopNativeTransport()
    {
        ssProxy.stopNativeTransport();
    }
    public void startNativeTransport()
    {
        ssProxy.startNativeTransport();
    }
    public boolean isNativeTransportRunning()
    {
        return ssProxy.isNativeTransportRunning();
    }
    public void stopGossiping()
    {
        ssProxy.stopGossiping();
    }
    public void startGossiping()
    {
        ssProxy.startGossiping();
    }
    public void stopThriftServer()
    {
        ssProxy.stopRPCServer();
    }
    public void startThriftServer()
    {
        ssProxy.startRPCServer();
    }
    public boolean isThriftServerRunning()
    {
        return ssProxy.isRPCServerRunning();
    }
    public boolean isInitialized()
    {
        return ssProxy.isInitialized();
    }
    public void setCompactionThroughput(int value)
    {
        ssProxy.setCompactionThroughputMbPerSec(value);
    }
    public int getCompactionThroughput()
    {
        return ssProxy.getCompactionThroughputMbPerSec();
    }
    public int getStreamThroughput()
    {
        return ssProxy.getStreamThroughputMbPerSec();
    }
    public int getExceptionCount()
    {
        return ssProxy.getExceptionCount();
    }
    public Map<String, Integer> getDroppedMessages()
    {
        return msProxy.getDroppedMessages();
    }
    public void loadNewSSTables(String ksName, String cfName)
    {
        ssProxy.loadNewSSTables(ksName, cfName);
    }
    public void rebuildIndex(String ksName, String cfName, String... idxNames)
    {
        ssProxy.rebuildSecondaryIndex(ksName, cfName, idxNames);
    }
    public String getGossipInfo()
    {
        return fdProxy.getAllEndpointStates();
    }
    public void stop(String string)
    {
        compactionProxy.stopCompaction(string);
    }
    public void setStreamThroughput(int value)
    {
        ssProxy.setStreamThroughputMbPerSec(value);
    }
    public void setTraceProbability(double value)
    {
        ssProxy.setTraceProbability(value);
    }
    public String getSchemaVersion()
    {
        return ssProxy.getSchemaVersion();
    }
    public List<String> describeRing(String keyspaceName) throws IOException
    {
        return ssProxy.describeRingJMX(keyspaceName);
    }
    public void rebuild(String sourceDc)
    {
        ssProxy.rebuild(sourceDc);
    }
    public List<String> sampleKeyRange()
    {
        return ssProxy.sampleKeyRange();
    }
    public void resetLocalSchema() throws IOException
    {
        ssProxy.resetLocalSchema();
    }
    /** @return true once any async repair run through this probe has failed. */
    public boolean isFailed()
    {
        return failed;
    }
    public long getReadRepairAttempted()
    {
        return spProxy.getReadRepairAttempted();
    }
    public long getReadRepairRepairedBlocking()
    {
        return spProxy.getReadRepairRepairedBlocking();
    }
    public long getReadRepairRepairedBackground()
    {
        return spProxy.getReadRepairRepairedBackground();
    }
}
/**
 * Iterates over all ColumnFamilyStore MBeans (regular CFs and secondary
 * index CFs) registered on a node, as (keyspace name, proxy) entries sorted
 * by keyspace, then column family, with a base CF ordered after its indexes.
 */
class ColumnFamilyStoreMBeanIterator implements Iterator<Map.Entry<String, ColumnFamilyStoreMBean>>
{
    private MBeanServerConnection mbeanServerConn;
    Iterator<Entry<String, ColumnFamilyStoreMBean>> mbeans;
    public ColumnFamilyStoreMBeanIterator(MBeanServerConnection mbeanServerConn)
        throws MalformedObjectNameException, NullPointerException, IOException
    {
        this.mbeanServerConn = mbeanServerConn;
        List<Entry<String, ColumnFamilyStoreMBean>> cfMbeans = getCFSMBeans(mbeanServerConn, "ColumnFamilies");
        cfMbeans.addAll(getCFSMBeans(mbeanServerConn, "IndexColumnFamilies"));
        Collections.sort(cfMbeans, new Comparator<Entry<String, ColumnFamilyStoreMBean>>()
        {
            public int compare(Entry<String, ColumnFamilyStoreMBean> e1, Entry<String, ColumnFamilyStoreMBean> e2)
            {
                //compare keyspace, then CF name, then normal vs. index
                int keyspaceNameCmp = e1.getKey().compareTo(e2.getKey());
                if(keyspaceNameCmp != 0)
                    return keyspaceNameCmp;
                // get CF name and split it for index name ("cf.idx" => base CF + index)
                String e1CF[] = e1.getValue().getColumnFamilyName().split("\\.");
                // BUG FIX: was split from e1; comparing an entry against itself
                // broke the CF/index ordering whenever indexes were present.
                String e2CF[] = e2.getValue().getColumnFamilyName().split("\\.");
                assert e1CF.length <= 2 && e2CF.length <= 2 : "unexpected split count for column family name";
                //if neither are indexes, just compare CF names
                if(e1CF.length == 1 && e2CF.length == 1)
                    return e1CF[0].compareTo(e2CF[0]);
                //check if it's the same CF
                int cfNameCmp = e1CF[0].compareTo(e2CF[0]);
                if(cfNameCmp != 0)
                    return cfNameCmp;
                // if both are indexes (for the same CF), compare them
                if(e1CF.length == 2 && e2CF.length == 2)
                    return e1CF[1].compareTo(e2CF[1]);
                //if length of e1CF is 1, it's not an index, so sort it higher
                return e1CF.length == 1 ? 1 : -1;
            }
        });
        mbeans = cfMbeans.iterator();
    }
    /** Queries all MBeans of the given type and wraps each in a typed proxy. */
    private List<Entry<String, ColumnFamilyStoreMBean>> getCFSMBeans(MBeanServerConnection mbeanServerConn, String type)
            throws MalformedObjectNameException, IOException
    {
        ObjectName query = new ObjectName("org.apache.cassandra.db:type=" + type +",*");
        Set<ObjectName> cfObjects = mbeanServerConn.queryNames(query, null);
        List<Entry<String, ColumnFamilyStoreMBean>> mbeans = new ArrayList<Entry<String, ColumnFamilyStoreMBean>>(cfObjects.size());
        for(ObjectName n : cfObjects)
        {
            String keyspaceName = n.getKeyProperty("keyspace");
            ColumnFamilyStoreMBean cfsProxy = JMX.newMBeanProxy(mbeanServerConn, n, ColumnFamilyStoreMBean.class);
            mbeans.add(new AbstractMap.SimpleImmutableEntry<String, ColumnFamilyStoreMBean>(keyspaceName, cfsProxy));
        }
        return mbeans;
    }
    public boolean hasNext()
    {
        return mbeans.hasNext();
    }
    public Entry<String, ColumnFamilyStoreMBean> next()
    {
        return mbeans.next();
    }
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
/**
 * Iterates over the request- and internal-stage thread pool executor MBeans
 * of a node, yielding (pool name, executor proxy) pairs.
 */
class ThreadPoolProxyMBeanIterator implements Iterator<Map.Entry<String, JMXEnabledThreadPoolExecutorMBean>>
{
    private final Iterator<ObjectName> resIter;
    private final MBeanServerConnection mbeanServerConn;
    public ThreadPoolProxyMBeanIterator(MBeanServerConnection mbeanServerConn)
        throws MalformedObjectNameException, NullPointerException, IOException
    {
        this.mbeanServerConn = mbeanServerConn;
        // Request stages first, then internal stages, keeping each query
        // result's own traversal order.
        List<ObjectName> poolNames = new ArrayList<ObjectName>();
        poolNames.addAll(mbeanServerConn.queryNames(new ObjectName("org.apache.cassandra.request:type=*"), null));
        poolNames.addAll(mbeanServerConn.queryNames(new ObjectName("org.apache.cassandra.internal:type=*"), null));
        resIter = poolNames.iterator();
    }
    public boolean hasNext()
    {
        return resIter.hasNext();
    }
    public Map.Entry<String, JMXEnabledThreadPoolExecutorMBean> next()
    {
        ObjectName poolObjectName = resIter.next();
        // The "type" key property of the ObjectName is the pool's display name.
        return new AbstractMap.SimpleImmutableEntry<String, JMXEnabledThreadPoolExecutorMBean>(
                poolObjectName.getKeyProperty("type"),
                JMX.newMBeanProxy(mbeanServerConn, poolObjectName, JMXEnabledThreadPoolExecutorMBean.class));
    }
    public void remove()
    {
        throw new UnsupportedOperationException();
    }
}
/**
 * JMX notification listener that triggers an async repair and blocks the
 * calling thread until the repair session reports FINISHED or SESSION_FAILED,
 * echoing progress messages to the supplied stream.
 */
class RepairRunner implements NotificationListener
{
    private final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
    private final Condition completed = new SimpleCondition();
    private final PrintStream out;
    private final String keyspace;
    private final String[] columnFamilies;
    // Id of the repair command we started; notifications for other commands
    // are ignored.
    private int commandId;
    private boolean success = true;
    RepairRunner(PrintStream out, String keyspace, String... columnFamilies)
    {
        this.out = out;
        this.keyspace = keyspace;
        this.columnFamilies = columnFamilies;
    }
    public boolean repairAndWait(StorageServiceMBean ssProxy, boolean isSequential, boolean isLocal, boolean primaryRangeOnly) throws InterruptedException
    {
        commandId = ssProxy.forceRepairAsync(keyspace, isSequential, isLocal, primaryRangeOnly, columnFamilies);
        return waitForRepair();
    }
    public boolean repairRangeAndWait(StorageServiceMBean ssProxy, boolean isSequential, boolean isLocal, String startToken, String endToken) throws InterruptedException
    {
        commandId = ssProxy.forceRepairRangeAsync(startToken, endToken, keyspace, isSequential, isLocal, columnFamilies);
        return waitForRepair();
    }
    // A command id <= 0 means the server found nothing to repair; otherwise
    // block until handleNotification() signals completion of our session.
    private boolean waitForRepair() throws InterruptedException
    {
        if (commandId > 0)
            completed.await();
        else
            out.println(String.format("[%s] Nothing to repair for keyspace '%s'", timestampFormat.format(System.currentTimeMillis()), keyspace));
        return success;
    }
    public void handleNotification(Notification notification, Object handback)
    {
        if (!"repair".equals(notification.getType()))
            return;
        // repair status is int array with [0] = cmd number, [1] = status
        int[] status = (int[]) notification.getUserData();
        assert status.length == 2;
        if (commandId != status[0])
            return;
        out.println(String.format("[%s] %s", timestampFormat.format(notification.getTimeStamp()), notification.getMessage()));
        if (status[1] == ActiveRepairService.Status.SESSION_FAILED.ordinal())
            success = false;
        else if (status[1] == ActiveRepairService.Status.FINISHED.ordinal())
            completed.signalAll();
    }
}
| apache-2.0 |
googleapis/java-vision | google-cloud-vision/src/main/java/com/google/cloud/vision/v1p3beta1/stub/ImageAnnotatorStub.java | 2519 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.vision.v1p3beta1.stub;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesRequest;
import com.google.cloud.vision.v1p3beta1.AsyncBatchAnnotateFilesResponse;
import com.google.cloud.vision.v1p3beta1.BatchAnnotateImagesRequest;
import com.google.cloud.vision.v1p3beta1.BatchAnnotateImagesResponse;
import com.google.cloud.vision.v1p3beta1.OperationMetadata;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.OperationsStub;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * Base stub class for the ImageAnnotator service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>Every method here throws {@link UnsupportedOperationException}; concrete
 * transport-specific subclasses (e.g. the gRPC stub) override the callables
 * they support.
 */
@BetaApi
@Generated("by gapic-generator-java")
public abstract class ImageAnnotatorStub implements BackgroundResource {

  /** Returns the operations stub used to poll long-running operations. */
  public OperationsStub getOperationsStub() {
    throw new UnsupportedOperationException("Not implemented: getOperationsStub()");
  }

  /** Returns the callable for the synchronous BatchAnnotateImages RPC. */
  public UnaryCallable<BatchAnnotateImagesRequest, BatchAnnotateImagesResponse>
      batchAnnotateImagesCallable() {
    throw new UnsupportedOperationException("Not implemented: batchAnnotateImagesCallable()");
  }

  /** Returns the operation callable for AsyncBatchAnnotateFiles, with typed response/metadata. */
  public OperationCallable<
          AsyncBatchAnnotateFilesRequest, AsyncBatchAnnotateFilesResponse, OperationMetadata>
      asyncBatchAnnotateFilesOperationCallable() {
    throw new UnsupportedOperationException(
        "Not implemented: asyncBatchAnnotateFilesOperationCallable()");
  }

  /** Returns the raw (untyped Operation) callable for AsyncBatchAnnotateFiles. */
  public UnaryCallable<AsyncBatchAnnotateFilesRequest, Operation>
      asyncBatchAnnotateFilesCallable() {
    throw new UnsupportedOperationException("Not implemented: asyncBatchAnnotateFilesCallable()");
  }

  @Override
  public abstract void close();
}
| apache-2.0 |
deleidos/digitaledge-platform | commons-cloud/src/main/java/com/deleidos/rtws/commons/cloud/util/ComputeResourcesFetcher.java | 12175 | /**
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "{}"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright {yyyy} {name of copyright owner}
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deleidos.rtws.commons.cloud.util;
import java.util.List;
import com.deleidos.rtws.commons.cloud.environment.monitor.representations.Instance;
/**
 * Strategy interface for retrieving the current set of compute instances
 * from the cloud environment monitor.
 */
public interface ComputeResourcesFetcher {

    /**
     * Fetches the compute resources.
     *
     * @return the current list of {@link Instance} representations
     */
    public List<Instance> compute();
}
| apache-2.0 |
AdamsTHDev/e-learning-web | src/main/java/com/adms/elearning/web/bean/base/AbstractSearchBean.java | 344 | package com.adms.elearning.web.bean.base;
/**
 * Base class for search-backed JSF beans: combines the generic search
 * contract with a fixed page size used by paging data tables.
 *
 * @param <T> entity type returned by the search
 */
public abstract class AbstractSearchBean<T> extends BaseBean implements ISearchBean<T> {

    private static final long serialVersionUID = 55877643186617076L;

    // Fixed page size for result tables. Integer.valueOf replaces the
    // deprecated new Integer(int) constructor and reuses the JDK's cached
    // small-integer instance instead of allocating a fresh object.
    private final Integer rowPerPage = Integer.valueOf(20);

    /**
     * @return number of rows shown per result page (always 20)
     */
    public Integer getRowPerPage() {
        return rowPerPage;
    }
}
| apache-2.0 |
searchtechnologies/heritrix-connector | engine-3.1.1/modules/src/main/java/org/archive/modules/net/ServerCache.java | 3254 | /*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Licensed to the Internet Archive (IA) by one or more individual
* contributors.
*
* The IA licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.archive.modules.net;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.collections.Closure;
import org.apache.commons.httpclient.URIException;
import org.archive.net.UURI;
/**
* Abstract class for crawl-global registry of CrawlServer (host:port) and
* CrawlHost (hostname) objects.
*/
public abstract class ServerCache {

    private static Logger logger =
        Logger.getLogger(ServerCache.class.getName());

    /** Look up the CrawlHost record for the given hostname. */
    public abstract CrawlHost getHostFor(String host);

    /** Look up the CrawlServer record for the given server key (host:port). */
    public abstract CrawlServer getServerFor(String serverKey);

    /**
     * Utility for performing an action on every CrawlHost.
     *
     * @param action 1-argument Closure to apply to each CrawlHost
     */
    public abstract void forAllHostsDo(Closure action);

    /**
     * Get the {@link CrawlHost} associated with <code>uuri</code>.
     * The pseudo-schemes "dns" and "whois" map to their own synthetic
     * host entries; any other URI is keyed by its referenced host.
     *
     * @param uuri CandidateURI we're to return Host for.
     * @return the matching CrawlHost, or null when the URI is unusable.
     */
    public CrawlHost getHostFor(UURI uuri) {
        try {
            String scheme = uuri.getScheme();
            if (scheme.equals("dns")) {
                return getHostFor("dns:");
            }
            if (scheme.equals("whois")) {
                return getHostFor("whois:");
            }
            return getHostFor(uuri.getReferencedHost());
        } catch (URIException e) {
            logger.log(Level.SEVERE, uuri.toString(), e);
            return null;
        }
    }

    /**
     * Get the {@link CrawlServer} associated with <code>uuri</code>,
     * resolved via its server key.
     *
     * @param uuri CandidateURI we're to get server from.
     * @return the matching CrawlServer, or null when no key is obtainable.
     */
    public CrawlServer getServerFor(UURI uuri) {
        try {
            // TODOSOMEDAY: make this robust against those rare cases
            // where authority is not a hostname.
            String key = CrawlServer.getServerKey(uuri);
            if (key == null) {
                return null;
            }
            return getServerFor(key);
        } catch (URIException e) {
            logger.log(
                Level.FINE, "No server key obtainable: " + uuri.toString(), e);
        } catch (NullPointerException npe) {
            logger.log(
                Level.FINE, "No server key obtainable: " + uuri.toString(), npe);
        }
        return null;
    }

    public abstract Set<String> hostKeys();
}
| apache-2.0 |
sdcuike/beaver | src/test/java/com/doctor/commons/IDCardUtilsTest.java | 6184 | package com.doctor.commons;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.hamcrest.core.IsEqual;
import org.junit.Assert;
import org.junit.Test;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * Tests for IDCardUtils: 15-to-18-digit ID upgrade, area-code utilities,
 * 15/18-digit validation (format, date, and check-digit), and gender lookup.
 */
public class IDCardUtilsTest {

    /** Verifies 15-digit IDs are upgraded to the expected 18-digit form (century + check digit). */
    @Test
    public void testTransformIdCard15to18() {
        String idCard15 = "110105710923582";
        String idCard18 = "110105197109235829";
        String transformIdCard15to18 = IDCardUtils.transformIdCard15to18(idCard15);
        Assert.assertThat(transformIdCard15to18, IsEqual.equalTo(idCard18));
        idCard15 = "522634520829128";
        idCard18 = "522634195208291285";
        transformIdCard15to18 = IDCardUtils.transformIdCard15to18(idCard15);
        Assert.assertThat(transformIdCard15to18, IsEqual.equalTo(idCard18));
    }

    /** Dumps all province-level (2-digit) area codes sorted by code. Exploratory; no assertions. */
    @Test
    public void test() {
        Set<Entry<String, String>> entrySet = IDCardUtils.AreCodeUtil.entrySet();
        ArrayList<Entry<String, String>> list = new ArrayList<>(entrySet);
        Collections.sort(list, new Comparator<Entry<String, String>>() {
            public int compare(Entry<String, String> a, Entry<String, String> b) {
                return a.getKey().compareTo(b.getKey());
            }
        });
        for (Entry<String, String> entry : list) {
            if (entry.getKey().length() == 2) {
                System.out.println(entry.getKey() + "==" + entry.getValue());
            }
        }
    }

    /**
     * Generates the province/city map structure from the raw area-code source.
     * Exploratory code-generation helper rather than a real assertion test.
     *
     * @throws IOException
     * @throws URISyntaxException
     */
    @Test
    public void test_AreCodeUtil_formatFromAreCodeSource() throws IOException, URISyntaxException {
        ObjectMapper objectMapper = new ObjectMapper();
        Map<String, String> formatFromAreCodeSource = IDCardUtils.AreCodeUtil.formatFromAreCodeSource();
        String string = objectMapper.writeValueAsString(formatFromAreCodeSource);
        System.out.println(string);
        Map<String, String> ProvinceCode = new LinkedHashMap<>();
        Map<String, String> cityCode = new LinkedHashMap<>();
        // 2-digit keys are provinces, 4-digit keys are cities.
        for (Entry<String, String> e : formatFromAreCodeSource.entrySet()) {
            String key = e.getKey();
            if (key.length() == 2) {
                ProvinceCode.put(key, e.getValue());
            } else if (key.length() == 4) {
                cityCode.put(key, e.getValue());
            }
        }
        System.out.println("ProvinceCode:" + ProvinceCode);
        System.out.println("cityCode:" + cityCode);
        StringBuilder sb = new StringBuilder();
        for (Entry<String, String> e : cityCode.entrySet()) {
            // Emits lines of the form: CityCode.put("11", "北京市");
            sb.append("CityCode.put(\"").append(e.getKey()).append("\"").append(",");
            sb.append("\"").append(e.getValue()).append("\");");
        }
        // Copy the output below into the source code and reformat it.
        System.out.println(sb);
    }

    /** A well-formed 15-digit ID validates. */
    @Test
    public void test_isValidIdCard15_true() {
        String idCard15 = "110105710923582";
        boolean b = IDCardUtils.isValidIdCard15(idCard15);
        Assert.assertThat(b, IsEqual.equalTo(true));
    }

    /** 15-digit IDs are all-numeric: a trailing 'X' must be rejected by the pattern. */
    @Test
    public void test_isValidIdCard15_param_PatternForIdCard15() {
        String idCard15 = "11010571092358X";
        boolean b = IDCardUtils.isValidIdCard15(idCard15);
        Assert.assertThat(b, IsEqual.equalTo(false));
    }

    /** Invalid embedded date (day 33) must be rejected. */
    @Test
    public void test_isValidIdCard15_param_date_0() {
        String idCard15 = "110105710933582";
        boolean b = IDCardUtils.isValidIdCard15(idCard15);
        Assert.assertThat(b, IsEqual.equalTo(false));
    }

    /** Another invalid embedded date must be rejected. */
    @Test
    public void test_isValidIdCard15_param_date_1() {
        String idCard15 = "110105710230582";
        boolean b = IDCardUtils.isValidIdCard15(idCard15);
        Assert.assertThat(b, IsEqual.equalTo(false));
    }

    /**
     * Assorted real-shaped 18-digit IDs: valid ones (including an uppercase
     * 'X' check digit) pass; one with a lowercase 'x' check digit fails.
     */
    @Test
    public void test_isValidIdCard18_true() {
        String idCard18 = "110105197109235829";
        boolean b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "23213119830817173X";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "310102198312252934";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "341621198808284713";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "331021198703270628";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        // Lowercase 'x' check digit: expected to be rejected.
        idCard18 = "37078119790127719x";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(false));
        idCard18 = "410526198809018242";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "331003199504222393";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
    }

    /** Pattern check for 18-digit IDs, including an uppercase 'X' check digit. */
    @Test
    public void test_isValidIdCard18_PatternForIdCard18_true() {
        String idCard18 = "512501197203035172";
        boolean b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
        idCard18 = "51081119840301735X";
        b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(true));
    }

    /** Impossible embedded date (Feb 30) must be rejected. */
    @Test
    public void test_isValidIdCard18_param_date_false() {
        String idCard18 = "44011119880230214X";
        boolean b = IDCardUtils.isValidIdCard18(idCard18);
        Assert.assertThat(b, IsEqual.equalTo(false));
    }

    /** Gender lookup: even sequence digit maps to "F" (female). */
    @Test
    public void test_getGender_() {
        String idCard18 = "44011119880230214X";
        System.out.println(idCard18.length());
        String gender = IDCardUtils.getGender(idCard18);
        Assert.assertThat(gender, IsEqual.equalTo("F"));
    }
}
| apache-2.0 |
matinalhope/explore | spring-jdbc/src/book/java/com/matianl/explore/java7concurrency/Chapter9/ch9_recipe03/src/com/packtpub/java7/concurrency/chapter5/recipe06/task/AlwaysThrowsExceptionWorkerThread.java | 943 | package com.matianl.explore.java7concurrency.Chapter9.ch9_recipe03.src.com.packtpub.java7.concurrency.chapter5.recipe06.task;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.ForkJoinWorkerThread;
/**
* This class implements a worker thread. This is a thread that
* is going to execute ForkJoinTask objects in a ForkJoinPool.
*
* Extends the basic class ForkJoinWorkerThread
*/
public class AlwaysThrowsExceptionWorkerThread extends ForkJoinWorkerThread {
/**
* Constructor of the class. Call the constructor of the
* parent class
* @param pool ForkJoinPool where the thread is going to execute
*/
protected AlwaysThrowsExceptionWorkerThread(ForkJoinPool pool) {
super(pool);
}
/**
* Method that is going to execute where the Worker Thread
* begins its execution
*/
@Override
protected void onStart() {
super.onStart();
throw new RuntimeException("Exception from worker thread");
}
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/InstanceEventWindowTimeRangeRequestStaxUnmarshaller.java | 3396 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* InstanceEventWindowTimeRangeRequest StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class InstanceEventWindowTimeRangeRequestStaxUnmarshaller implements Unmarshaller<InstanceEventWindowTimeRangeRequest, StaxUnmarshallerContext> {

    /**
     * Reads one InstanceEventWindowTimeRangeRequest from the StAX event
     * stream, populating StartWeekDay/StartHour/EndWeekDay/EndHour from the
     * correspondingly named child elements. Returns when the enclosing
     * element is closed or the document ends.
     */
    public InstanceEventWindowTimeRangeRequest unmarshall(StaxUnmarshallerContext context) throws Exception {
        InstanceEventWindowTimeRangeRequest instanceEventWindowTimeRangeRequest = new InstanceEventWindowTimeRangeRequest();
        int originalDepth = context.getCurrentDepth();
        // Child elements of interest live exactly one level below the current element.
        int targetDepth = originalDepth + 1;
        if (context.isStartOfDocument())
            targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return instanceEventWindowTimeRangeRequest;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("StartWeekDay", targetDepth)) {
                    instanceEventWindowTimeRangeRequest.setStartWeekDay(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("StartHour", targetDepth)) {
                    instanceEventWindowTimeRangeRequest.setStartHour(IntegerStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("EndWeekDay", targetDepth)) {
                    instanceEventWindowTimeRangeRequest.setEndWeekDay(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
                if (context.testExpression("EndHour", targetDepth)) {
                    instanceEventWindowTimeRangeRequest.setEndHour(IntegerStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Depth has popped above where we started: this object is complete.
                if (context.getCurrentDepth() < originalDepth) {
                    return instanceEventWindowTimeRangeRequest;
                }
            }
        }
    }

    private static InstanceEventWindowTimeRangeRequestStaxUnmarshaller instance;

    /**
     * Lazily created shared instance. Not synchronized, but the unmarshaller
     * holds no instance state, so a rare duplicate creation is harmless.
     */
    public static InstanceEventWindowTimeRangeRequestStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new InstanceEventWindowTimeRangeRequestStaxUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
Xenation/LWJGL2DTest | src/entities/PlayerCollider.java | 2483 | package entities;
import org.lwjgl.util.vector.Vector2f;
import storage.ChunkMap;
import tiles.Tile;
/**
 * Collider specialised for the player. Adds "slide" resolution against a
 * solid tile, including a one-tile step-up when walking into a tile whose
 * top is clear.
 */
public class PlayerCollider extends Collider {

    private Player player;

    /**
     * Builds a collider that shares the player's position vector, so the
     * collider and the player always agree on location.
     */
    public PlayerCollider(Player player) {
        super(player.position);
        this.player = player;
    }

    /**
     * Computes a slide vector restricting movement against a solid tile.
     *
     * @param til    solid tile being tested
     * @param chkMap chunk map used to query the tile's neighbours
     * @param nx     attempted x offset (assumed; semantics come from
     *               Collider.isColliding — TODO confirm)
     * @param ny     attempted y offset (same assumption as nx)
     * @return vector whose x/y components are 1 (movement allowed on that
     *         axis) or 0 (blocked); (1, 1) when there is no collision.
     *         May also mutate the player's y position when stepping up.
     */
    public Vector2f collideSlide(Tile til, ChunkMap chkMap, float nx, float ny) {
        Vector2f slide = new Vector2f(1, 1);
        if (isColliding(til, nx, ny)) {
            // Tile and collider edges in world coordinates.
            float tilTop = til.y + Tile.TILE_SIZE;
            float tilBottom = til.y;
            float tilRight = til.x + Tile.TILE_SIZE;
            float tilLeft = til.x;
            float curTop = getWY() + h;
            float curBottom = getWY();
            float curRight = getWX() + w;
            float curLeft = getWX();

            if (curRight <= tilLeft) {
                // We are left of tile: a solid left-neighbour forming a wall
                // blocks vertical sliding; otherwise try to step up, and
                // block horizontal movement if we cannot.
                Tile left = chkMap.getLeftTile(til);
                if (left != null && left.getType().isSolid() && (curBottom >= tilTop || curTop <= tilBottom)) {
                    slide.y = 0;
                } else if (!tryStepUp(til, chkMap, curBottom, tilBottom, tilTop)) {
                    slide.x = 0;
                }
            }
            if (curLeft >= tilRight) {
                // We are right of tile: mirror image of the left-side case.
                Tile right = chkMap.getRightTile(til);
                if (right != null && right.getType().isSolid() && (curBottom >= tilTop || curTop <= tilBottom)) {
                    slide.y = 0;
                } else if (!tryStepUp(til, chkMap, curBottom, tilBottom, tilTop)) {
                    slide.x = 0;
                }
            }
            if (curBottom >= tilTop) {
                // We are above the tile: a solid tile stacked on top blocks
                // horizontal sliding; otherwise vertical movement is blocked.
                Tile top = chkMap.getTopTile(til);
                if (top != null && top.getType().isSolid() && (curLeft >= tilRight || curRight <= tilLeft)) {
                    slide.x = 0;
                } else {
                    slide.y = 0;
                }
            }
            if (curTop <= tilBottom) {
                // We are below the tile: symmetric to the above-tile case.
                Tile bottom = chkMap.getBottomTile(til);
                if (bottom != null && bottom.getType().isSolid() && (curLeft >= tilRight || curRight <= tilLeft)) {
                    slide.x = 0;
                } else {
                    slide.y = 0;
                }
            }
        }
        return slide;
    }

    /**
     * Attempts the one-tile step-up used when walking into a tile sideways:
     * allowed only when the player is on the ground, the tile above the
     * obstacle is empty or non-solid, and the player's feet are within the
     * obstacle's vertical span. Mutates the player's y position on success.
     * (This logic was previously duplicated in the left- and right-side
     * branches of collideSlide.)
     *
     * @return true if the player was stepped up (horizontal movement stays
     *         allowed); false if the step-up is not possible.
     */
    private boolean tryStepUp(Tile til, ChunkMap chkMap, float curBottom, float tilBottom, float tilTop) {
        Tile top = chkMap.getTopTile(til);
        if (!player.isInAir() && (top == null || !top.getType().isSolid())
                && (curBottom >= tilBottom && curBottom <= tilTop)) {
            this.entPos.y += Tile.TILE_SIZE + 0.01f; // Step up; small epsilon avoids re-colliding
            return true;
        }
        return false;
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/Tenancy.java | 1753 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import javax.annotation.Generated;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * The tenancy of an EC2 instance: shared hardware ({@code default}),
 * single-tenant hardware ({@code dedicated}), or a specific Dedicated
 * Host ({@code host}).
 */
public enum Tenancy {

    Default("default"),
    Dedicated("dedicated"),
    Host("host");

    // Wire value sent to / received from the EC2 API; final because enum
    // constants are immutable.
    private final String value;

    private Tenancy(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return Tenancy corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static Tenancy fromValue(String value) {
        if (value == null || value.isEmpty()) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        for (Tenancy enumEntry : Tenancy.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| apache-2.0 |
BasisTI/rancher-java-sdk | src/main/java/io/rancher/service/ConfigItemService.java | 921 | package io.rancher.service;
import io.rancher.base.Filters;
import io.rancher.base.TypeCollection;
import io.rancher.type.ConfigItem;
import retrofit2.Call;
import retrofit2.Response;
import retrofit2.http.Body;
import retrofit2.http.DELETE;
import retrofit2.http.GET;
import retrofit2.http.POST;
import retrofit2.http.PUT;
import retrofit2.http.Path;
import retrofit2.http.QueryMap;
/**
 * Retrofit service interface for the Rancher {@code /configItem} endpoints.
 */
public interface ConfigItemService {

    /** Lists every config item. */
    @GET("configItem")
    Call<TypeCollection<ConfigItem>> list();

    /** Lists config items matching the given query filters. */
    @GET("configItem")
    Call<TypeCollection<ConfigItem>> list(@QueryMap Filters filters);

    /** Fetches a single config item by id. */
    @GET("configItem/{id}")
    Call<ConfigItem> get(@Path("id") String id);

    /** Creates a new config item from the request body. */
    @POST("configItem")
    Call<ConfigItem> create(@Body ConfigItem configItem);

    /** Replaces the config item with the given id using the request body. */
    @PUT("configItem/{id}")
    Call<ConfigItem> update(@Path("id") String id, @Body ConfigItem configItem);

    /** Deletes the config item with the given id. */
    @DELETE("configItem/{id}")
    Call<Response> delete(@Path("id") String id);
}
| apache-2.0 |
masaruh/elasticsearch | test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java | 106559 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import com.carrotsearch.hppc.ObjectLongMap;
import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.annotations.TestGroup;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.http.HttpHost;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteResponse;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.node.info.NodeInfo;
import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.admin.indices.flush.FlushResponse;
import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.segments.IndexSegments;
import org.elasticsearch.action.admin.indices.segments.IndexShardSegments;
import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse;
import org.elasticsearch.action.admin.indices.segments.ShardSegments;
import org.elasticsearch.action.admin.indices.stats.IndexShardStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequestBuilder;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.AdminClient;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.zen.ElectMasterService;
import org.elasticsearch.discovery.zen.ZenDiscovery;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MergePolicyConfig;
import org.elasticsearch.index.MergeSchedulerConfig;
import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.Segment;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MockFieldFilterPlugin;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.indices.IndicesQueryCache;
import org.elasticsearch.indices.IndicesRequestCache;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.node.NodeMocksPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.MockSearchService;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.test.client.RandomizingClient;
import org.elasticsearch.test.discovery.TestZenDiscovery;
import org.elasticsearch.test.disruption.NetworkDisruption;
import org.elasticsearch.test.disruption.ServiceDisruptionScheme;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.AssertingTransportInterceptor;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BooleanSupplier;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.elasticsearch.client.Requests.syncedFlushRequest;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS;
import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.XContentTestUtils.convertToMap;
import static org.elasticsearch.test.XContentTestUtils.differenceBetweenMapsIgnoringArrayOrder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyArray;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
/**
* {@link ESIntegTestCase} is an abstract base class to run integration
* tests against a JVM private Elasticsearch Cluster. The test class supports 2 different
* cluster scopes.
* <ul>
* <li>{@link Scope#TEST} - uses a new cluster for each individual test method.</li>
* <li>{@link Scope#SUITE} - uses a cluster shared across all test methods in the same suite</li>
* </ul>
* <p>
* The most common test scope is {@link Scope#SUITE} which shares a cluster per test suite.
* <p>
* If the test methods need specific node settings or change persistent and/or transient cluster settings {@link Scope#TEST}
* should be used. To configure a scope for the test cluster the {@link ClusterScope} annotation
* should be used, here is an example:
* <pre>
*
* {@literal @}ClusterScope(scope=Scope.TEST) public class SomeIT extends ESIntegTestCase {
* public void testMethod() {}
* }
* </pre>
* <p>
* If no {@link ClusterScope} annotation is present on an integration test the default scope is {@link Scope#SUITE}
* <p>
* A test cluster creates a set of nodes in the background before the test starts. The number of nodes in the cluster is
* determined at random and can change across tests. The {@link ClusterScope} allows configuring the initial number of nodes
* that are created before the tests start.
* <pre>
* {@literal @}ClusterScope(scope=Scope.SUITE, numDataNodes=3)
* public class SomeIT extends ESIntegTestCase {
* public void testMethod() {}
* }
* </pre>
* <p>
* Note, the {@link ESIntegTestCase} uses randomized settings on a cluster and index level. For instance
* each test might use different directory implementation for each test or will return a random client to one of the
* nodes in the cluster for each call to {@link #client()}. Test failures might only be reproducible if the correct
* system properties are passed to the test execution environment.
* <p>
* This class supports the following system properties (passed with -Dkey=value to the application)
* <ul>
* <li>-D{@value #TESTS_CLIENT_RATIO} - a double value in the interval [0..1] which defines the ratio between node and transport clients used</li>
* <li>-D{@value #TESTS_ENABLE_MOCK_MODULES} - a boolean value to enable or disable mock modules. This is
* useful to test the system without the asserting mock modules, to make sure the mocks don't hide any bugs in production code.</li>
* <li>a random per-index seed used to initialize the index random context (see {@link #INDEX_TEST_SEED_SETTING})</li>
* </ul>
*/
@LuceneTestCase.SuppressFileSystems("ExtrasFS") // doesn't work with potential multi data path from test cluster yet
public abstract class ESIntegTestCase extends ESTestCase {
/**
 * Property that controls whether ThirdParty Integration tests are run (not the default).
 */
public static final String SYSPROP_THIRDPARTY = "tests.thirdparty";
/**
 * Annotation for third-party integration tests.
 * <p>
 * These are tests that require a third-party service in order to run. They
 * may require the user to manually configure an external process (such as rabbitmq),
 * or may additionally require some external configuration (e.g. AWS credentials)
 * via the {@code tests.config} system property.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE)
@TestGroup(enabled = false, sysProperty = ESIntegTestCase.SYSPROP_THIRDPARTY)
public @interface ThirdParty {
}
/** Node names of the corresponding clusters will start with these prefixes. */
public static final String SUITE_CLUSTER_NODE_PREFIX = "node_s";
public static final String TEST_CLUSTER_NODE_PREFIX = "node_t";
/**
 * Key used to set the transport client ratio via the commandline -D{@value #TESTS_CLIENT_RATIO}
 */
public static final String TESTS_CLIENT_RATIO = "tests.client.ratio";
/**
 * Key used to eventually switch to using an external cluster and provide its transport addresses
 */
public static final String TESTS_CLUSTER = "tests.cluster";
/**
 * Key used to retrieve the index random seed from the index settings on a running node.
 * The value of this seed can be used to initialize a random context for a specific index.
 * It's set once per test via a generic index template.
 */
public static final Setting<Long> INDEX_TEST_SEED_SETTING =
Setting.longSetting("index.tests.seed", 0, Long.MIN_VALUE, Property.IndexScope);
/**
 * A boolean system property to enable or disable mock modules. Disabling them is
 * useful to test the system without the extra assertions the mock modules add,
 * to make sure the mocks themselves don't hide any bugs in production code.
 *
 * @see ESIntegTestCase
 */
public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules";
// Mock modules are enabled unless the property is explicitly set to something other than "true".
private static final boolean MOCK_MODULES_ENABLED = "true".equals(System.getProperty(TESTS_ENABLE_MOCK_MODULES, "true"));
/**
 * Threshold at which indexing switches from frequently async to frequently bulk.
 */
private static final int FREQUENT_BULK_THRESHOLD = 300;
/**
 * Threshold at which bulk indexing will always be used.
 */
private static final int ALWAYS_BULK_THRESHOLD = 3000;
/**
 * Maximum number of async operations that indexRandom will kick off at one time.
 */
private static final int MAX_IN_FLIGHT_ASYNC_INDEXES = 150;
/**
 * Maximum number of documents in a single bulk index request.
 */
private static final int MAX_BULK_INDEX_REQUEST_SIZE = 1000;
/**
 * Default minimum number of shards for an index
 */
protected static final int DEFAULT_MIN_NUM_SHARDS = 1;
/**
 * Default maximum number of shards for an index
 */
protected static final int DEFAULT_MAX_NUM_SHARDS = 10;
/**
 * The current cluster depending on the configured {@link Scope}.
 * By default if no {@link ClusterScope} is configured this will hold a reference to the suite cluster.
 */
private static TestCluster currentCluster;
// Shared REST client; closed and reset in clearClusters().
private static RestClient restClient = null;
// Resolved once per JVM via transportClientRatio().
private static final double TRANSPORT_CLIENT_RATIO = transportClientRatio();
// One cached cluster per test class (identity-keyed) so SUITE-scoped clusters can be reused.
private static final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>();
private static ESIntegTestCase INSTANCE = null; // see @SuiteScope
// Per-suite seed, assigned in beforeClass() and used for SUITE-scoped cluster construction.
private static Long SUITE_SEED = null;
@BeforeClass
public static void beforeClass() throws Exception {
// Fix the suite seed once per test class so SUITE-scoped clusters are reproducible,
// then run suite-scope initialization (see initializeSuiteScope).
SUITE_SEED = randomLong();
initializeSuiteScope();
}
/** Deprecation-warning tracking is always disabled for integration tests. */
@Override
protected final boolean enableWarningsCheck() {
// In an integ test it doesn't make sense to keep track of warnings: if the cluster is external the warnings are in another jvm,
// if the cluster is internal the deprecation logger is shared across all nodes
return false;
}
/**
 * Per-test setup: builds (or reuses) the test cluster for the configured {@link Scope},
 * wipes leftover state, and installs the randomized index template.
 */
protected final void beforeInternal() throws Exception {
final Scope currentClusterScope = getCurrentClusterScope();
switch (currentClusterScope) {
case SUITE:
// SUITE scope reuses one cluster per test class, seeded once in beforeClass().
assert SUITE_SEED != null : "Suite seed was not initialized";
currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED);
break;
case TEST:
// TEST scope gets a fresh cluster (with a fresh seed) for every test method.
currentCluster = buildAndPutCluster(currentClusterScope, randomLong());
break;
default:
fail("Unknown Scope: [" + currentClusterScope + "]");
}
cluster().beforeTest(random(), getPerTestTransportClientRatio());
cluster().wipe(excludeTemplates());
randomIndexTemplate();
}
/**
 * Logs a progress message, tagged either per-suite (for suite-scoped setup/teardown,
 * where no individual test name is available) or per-test.
 */
private void printTestMessage(String message) {
final String className = getTestClass().getSimpleName();
if (isSuiteScopedTest(getClass()) && (getTestName().equals("<unknown>"))) {
logger.info("[{}]: {} suite", className, message);
return;
}
logger.info("[{}#{}]: {} test", className, getTestName(), message);
}
/**
 * Creates a randomized index template. This template is used to pass in randomized settings on a
 * per index basis. Allows to enable/disable the randomization for number of shards and replicas.
 */
public void randomIndexTemplate() throws IOException {
// TODO move settings for random directory etc here into the index based randomized settings.
if (cluster().size() > 0) {
Settings.Builder randomSettingsBuilder =
setRandomIndexSettings(random(), Settings.builder());
if (isInternalCluster()) {
// this is only used by mock plugins and if the cluster is not internal we just can't set it
randomSettingsBuilder.put(INDEX_TEST_SEED_SETTING.getKey(), random().nextLong());
}
randomSettingsBuilder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards())
.put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas());
// if the test class is annotated with SuppressCodecs("*"), it means don't use lucene's codec randomization
// otherwise, use it, it has assertions and so on that can find bugs.
SuppressCodecs annotation = getClass().getAnnotation(SuppressCodecs.class);
if (annotation != null && annotation.value().length == 1 && "*".equals(annotation.value()[0])) {
randomSettingsBuilder.put("index.codec", randomFrom(CodecService.DEFAULT_CODEC, CodecService.BEST_COMPRESSION_CODEC));
} else {
randomSettingsBuilder.put("index.codec", CodecService.LUCENE_DEFAULT_CODEC);
}
// sanity check: the template must only carry index-scoped settings, never node settings
for (String setting : randomSettingsBuilder.keys()) {
assertThat("non index. prefix setting set on index template, its a node setting...", setting, startsWith("index."));
}
// always default delayed allocation to 0 to make sure we have tests are not delayed
randomSettingsBuilder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0);
if (randomBoolean()) {
randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), randomBoolean());
}
if (randomBoolean()) {
randomSettingsBuilder.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), randomBoolean());
}
// register as a low-priority (order 0) template matching every index
PutIndexTemplateRequestBuilder putTemplate = client().admin().indices()
.preparePutTemplate("random_index_template")
.setPatterns(Collections.singletonList("*"))
.setOrder(0)
.setSettings(randomSettingsBuilder);
assertAcked(putTemplate.execute().actionGet());
}
}
/**
 * Populates {@code builder} with randomized index-level settings (merge, translog,
 * request cache, startup checks, delayed allocation).
 * NOTE: the order of random draws below is significant for test reproducibility; do not reorder.
 */
protected Settings.Builder setRandomIndexSettings(Random random, Settings.Builder builder) {
setRandomIndexMergeSettings(random, builder);
setRandomIndexTranslogSettings(random, builder);
if (random.nextBoolean()) {
builder.put(MergeSchedulerConfig.AUTO_THROTTLE_SETTING.getKey(), false);
}
if (random.nextBoolean()) {
builder.put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), random.nextBoolean());
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "checksum", "true"));
}
if (randomBoolean()) {
// keep this low so we don't stall tests
builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), RandomNumbers.randomIntBetween(random, 1, 15) + "ms");
}
return builder;
}
/**
 * Randomizes merge-related index settings.
 * The compound-format value is a boxed Double or Boolean, so its string form is
 * either a ratio like "0.42" or "true"/"false" — both accepted by the setting.
 */
private static Settings.Builder setRandomIndexMergeSettings(Random random, Settings.Builder builder) {
if (random.nextBoolean()) {
builder.put(MergePolicyConfig.INDEX_COMPOUND_FORMAT_SETTING.getKey(),
(random.nextBoolean() ? random.nextDouble() : random.nextBoolean()).toString());
}
// 1-in-4 chance to also randomize merge scheduler thread/merge counts
switch (random.nextInt(4)) {
case 3:
final int maxThreadCount = RandomNumbers.randomIntBetween(random, 1, 4);
final int maxMergeCount = RandomNumbers.randomIntBetween(random, maxThreadCount, maxThreadCount + 4);
builder.put(MergeSchedulerConfig.MAX_MERGE_COUNT_SETTING.getKey(), maxMergeCount);
builder.put(MergeSchedulerConfig.MAX_THREAD_COUNT_SETTING.getKey(), maxThreadCount);
break;
}
return builder;
}
/**
 * Randomizes translog-related index settings. Note the second draw may overwrite the
 * first flush-threshold value with a huge (1 PB) size so the translog never flushes by size.
 */
private static Settings.Builder setRandomIndexTranslogSettings(Random random, Settings.Builder builder) {
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(RandomNumbers.randomIntBetween(random, 1, 300), ByteSizeUnit.MB));
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB)); // just don't flush
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING.getKey(), RandomPicks.randomFrom(random, Translog.Durability.values()));
}
if (random.nextBoolean()) {
builder.put(IndexSettings.INDEX_TRANSLOG_SYNC_INTERVAL_SETTING.getKey(), RandomNumbers.randomIntBetween(random, 100, 5000), TimeUnit.MILLISECONDS);
}
return builder;
}
/**
 * Builds a test cluster under a private randomness context so cluster construction
 * consumes a deterministic random stream derived from the given seed.
 */
private TestCluster buildWithPrivateContext(final Scope scope, final long seed) throws Exception {
return RandomizedContext.current().runWithPrivateRandomness(seed, () -> buildTestCluster(scope, seed));
}
/**
 * Builds (or, for SUITE scope, reuses) the test cluster for this test class and caches it
 * in {@link #clusters}, closing any stale clusters beforehand.
 */
private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed) throws Exception {
final Class<?> clazz = this.getClass();
TestCluster testCluster = clusters.remove(clazz); // remove this cluster first
clearClusters(); // all leftovers are gone by now... this is really just a double safety if we miss something somewhere
switch (currentClusterScope) {
case SUITE:
if (testCluster == null) { // only build if it's not there yet
testCluster = buildWithPrivateContext(currentClusterScope, seed);
}
break;
case TEST:
// close the previous one and create a new one
IOUtils.closeWhileHandlingException(testCluster);
testCluster = buildTestCluster(currentClusterScope, seed);
break;
}
clusters.put(clazz, testCluster);
return testCluster;
}
/** Closes and forgets every cached cluster, then releases the shared REST client. */
private static void clearClusters() throws IOException {
if (!clusters.isEmpty()) {
IOUtils.close(clusters.values());
clusters.clear();
}
if (restClient != null) {
restClient.close();
restClient = null;
}
}
/**
 * Per-test teardown: verifies the test left no persistent/transient cluster metadata or
 * pending cluster states behind, wipes test state, and closes/clears clusters depending
 * on scope ({@code afterClass} additionally closes a SUITE-scoped cluster).
 */
protected final void afterInternal(boolean afterClass) throws Exception {
boolean success = false;
try {
final Scope currentClusterScope = getCurrentClusterScope();
clearDisruptionScheme();
try {
if (cluster() != null) {
if (currentClusterScope != Scope.TEST) {
// a shared (suite) cluster must not accumulate settings across tests
MetaData metaData = client().admin().cluster().prepareState().execute().actionGet().getState().getMetaData();
final Set<String> persistent = metaData.persistentSettings().keySet();
assertThat("test leaves persistent cluster metadata behind: " + persistent, persistent.size(), equalTo(0));
final Set<String> transientSettings = new HashSet<>(metaData.transientSettings().keySet());
if (isInternalCluster() && internalCluster().getAutoManageMinMasterNode()) {
// this is set by the test infra
transientSettings.remove(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey());
}
assertThat("test leaves transient cluster metadata behind: " + transientSettings,
transientSettings, empty());
}
ensureClusterSizeConsistency();
ensureClusterStateConsistency();
if (isInternalCluster()) {
// check no pending cluster states are leaked
for (Discovery discovery : internalCluster().getInstances(Discovery.class)) {
if (discovery instanceof ZenDiscovery) {
final ZenDiscovery zenDiscovery = (ZenDiscovery) discovery;
assertBusy(() -> {
final ClusterState[] states = zenDiscovery.pendingClusterStates();
assertThat(zenDiscovery.clusterState().nodes().getLocalNode().getName() +
" still having pending states:\n" +
Stream.of(states).map(ClusterState::toString).collect(Collectors.joining("\n")),
states, emptyArray());
});
}
}
}
beforeIndexDeletion();
cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete
if (afterClass || currentClusterScope == Scope.TEST) {
cluster().close();
}
cluster().assertAfterTest();
}
} finally {
if (currentClusterScope == Scope.TEST) {
clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST
}
}
success = true;
} finally {
if (!success) {
// if we failed here that means that something broke horribly, so all clusters are
// cleared on the next run; TODO: consider rethrowing instead of swallowing here
}
}
}
/**
 * @return An exclude set of index templates that will not be removed in between tests.
 */
protected Set<String> excludeTemplates() {
return Collections.emptySet();
}
/** Hook invoked right before test indices are deleted; delegates to the cluster. */
protected void beforeIndexDeletion() throws Exception {
cluster().beforeIndexDeletion();
}
/** Returns the test cluster for the currently active scope. */
public static TestCluster cluster() {
return currentCluster;
}
/** Returns true if the current cluster is an {@link InternalTestCluster} managed by the test infra. */
public static boolean isInternalCluster() {
return (currentCluster instanceof InternalTestCluster);
}
/**
 * Returns the current cluster as an {@link InternalTestCluster}.
 *
 * @throws UnsupportedOperationException if the current cluster is not internal (e.g. external/immutable)
 */
public static InternalTestCluster internalCluster() {
if (currentCluster instanceof InternalTestCluster) {
return (InternalTestCluster) currentCluster;
}
throw new UnsupportedOperationException("current test cluster is immutable");
}
/** Returns a {@link ClusterService} instance from the internal cluster. */
public ClusterService clusterService() {
return internalCluster().clusterService();
}
/** Returns a client to a random node of the cluster. */
public static Client client() {
return client(null);
}
/**
 * Returns a client to the given node, or a client to a random node when {@code node} is null.
 * The random-node client is frequently wrapped in a {@link RandomizingClient}.
 */
public static Client client(@Nullable String node) {
if (node != null) {
return internalCluster().client(node);
}
final Client randomNodeClient = cluster().client();
// most of the time exercise the randomizing wrapper, occasionally use the raw client
return frequently() ? new RandomizingClient(randomNodeClient, random()) : randomNodeClient;
}
/** Returns a client to a random data node, frequently wrapped in a {@link RandomizingClient}. */
public static Client dataNodeClient() {
Client client = internalCluster().dataNodeClient();
if (frequently()) {
client = new RandomizingClient(client, random());
}
return client;
}
/** Returns clients to all nodes of the current cluster. */
public static Iterable<Client> clients() {
return cluster().getClients();
}
/** Minimum number of shards for a randomized index; override to change the lower bound. */
protected int minimumNumberOfShards() {
return DEFAULT_MIN_NUM_SHARDS;
}
/** Maximum number of shards for a randomized index; override to change the upper bound. */
protected int maximumNumberOfShards() {
return DEFAULT_MAX_NUM_SHARDS;
}
/** Random shard count between {@link #minimumNumberOfShards()} and {@link #maximumNumberOfShards()} (inclusive). */
protected int numberOfShards() {
return between(minimumNumberOfShards(), maximumNumberOfShards());
}
/** Minimum number of replicas for a randomized index. */
protected int minimumNumberOfReplicas() {
return 0;
}
/** Maximum number of replicas, capped by available data nodes. */
protected int maximumNumberOfReplicas() {
//use either 0 or 1 replica, yet a higher amount when possible, but only rarely
int maxNumReplicas = Math.max(0, cluster().numDataNodes() - 1);
return frequently() ? Math.min(1, maxNumReplicas) : maxNumReplicas;
}
/** Random replica count between {@link #minimumNumberOfReplicas()} and {@link #maximumNumberOfReplicas()} (inclusive). */
protected int numberOfReplicas() {
return between(minimumNumberOfReplicas(), maximumNumberOfReplicas());
}
/** Installs the given disruption scheme on the internal cluster. */
public void setDisruptionScheme(ServiceDisruptionScheme scheme) {
internalCluster().setDisruptionScheme(scheme);
}
/** Clears any active disruption scheme; no-op when the cluster is not internal. */
public void clearDisruptionScheme() {
if (isInternalCluster()) {
internalCluster().clearDisruptionScheme();
}
}
/**
 * Returns a settings object used in {@link #createIndex(String...)} and {@link #prepareCreate(String)} and friends.
 * This method can be overwritten by subclasses to set defaults for the indices that are created by the test.
 * By default it returns a settings object that sets a random number of shards. Number of shards and replicas
 * can be controlled through specific methods.
 */
public Settings indexSettings() {
Settings.Builder builder = Settings.builder();
int numberOfShards = numberOfShards();
if (numberOfShards > 0) {
builder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards);
}
int numberOfReplicas = numberOfReplicas();
if (numberOfReplicas >= 0) {
builder.put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas);
}
// 30% of the time use a custom (randomized) data path for the index
if (randomInt(9) < 3) {
final String dataPath = randomAlphaOfLength(10);
logger.info("using custom data_path for index: [{}]", dataPath);
builder.put(IndexMetaData.SETTING_DATA_PATH, dataPath);
}
// always default delayed allocation to 0 to make sure tests are not delayed
builder.put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), 0);
// build once, at the end — the intermediate build() calls previously here created
// Settings objects that were immediately discarded
return builder.build();
}
/**
 * Creates one or more indices and asserts that the indices are acknowledged. If one of the indices
 * already exists this method will fail and wipe all the indices created so far.
 */
public final void createIndex(String... names) {
// track successfully created indices so a later failure can roll them all back
List<String> created = new ArrayList<>();
for (String name : names) {
boolean success = false;
try {
assertAcked(prepareCreate(name));
created.add(name);
success = true;
} finally {
if (!success && !created.isEmpty()) {
// leave the cluster clean: wipe everything created before the failure
cluster().wipeIndices(created.toArray(new String[created.size()]));
}
}
}
}
/**
 * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
 */
public final CreateIndexRequestBuilder prepareCreate(String index) {
// -1 means "no node-count restriction"
return prepareCreate(index, -1);
}
/**
 * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
 * The index that is created with this builder will only be allowed to allocate on the number of nodes passed to this
 * method.
 * <p>
 * This method uses allocation deciders to filter out certain nodes to allocate the created index on. It defines allocation
 * rules based on <code>index.routing.allocation.exclude._name</code>.
 * </p>
 */
public final CreateIndexRequestBuilder prepareCreate(String index, int numNodes) {
return prepareCreate(index, numNodes, Settings.builder());
}
/**
 * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}, augmented
 * by the given builder
 */
public CreateIndexRequestBuilder prepareCreate(String index, Settings.Builder settingsBuilder) {
return prepareCreate(index, -1, settingsBuilder);
}
/**
 * Creates a new {@link CreateIndexRequestBuilder} with the settings obtained from {@link #indexSettings()}.
 * The index that is created with this builder will only be allowed to allocate on the number of nodes passed to this
 * method.
 * <p>
 * This method uses allocation deciders to filter out certain nodes to allocate the created index on. It defines allocation
 * rules based on <code>index.routing.allocation.exclude._name</code>.
 * </p>
 */
public CreateIndexRequestBuilder prepareCreate(String index, int numNodes, Settings.Builder settingsBuilder) {
// caller-provided settings win over the randomized defaults from indexSettings()
Settings.Builder builder = Settings.builder().put(indexSettings()).put(settingsBuilder.build());
if (numNodes > 0) {
internalCluster().ensureAtLeastNumDataNodes(numNodes);
getExcludeSettings(index, numNodes, builder);
}
return client().admin().indices().prepareCreate(index).setSettings(builder.build());
}
/**
 * Adds an allocation-exclude setting so the index avoids all data nodes except {@code num} of them.
 * Note: the {@code index} parameter is currently unused.
 */
private Settings.Builder getExcludeSettings(String index, int num, Settings.Builder builder) {
String exclude = String.join(",", internalCluster().allDataNodesButN(num));
builder.put("index.routing.allocation.exclude._name", exclude);
return builder;
}
/**
 * Waits until all nodes have no pending tasks.
 */
public void waitNoPendingTasksOnAll() throws Exception {
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).get());
assertBusy(() -> {
for (Client client : clients()) {
// check in-flight fetches both before and after the pending-task check
ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get();
assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0));
PendingClusterTasksResponse pendingTasks = client.admin().cluster().preparePendingClusterTasks().setLocal(true).get();
assertThat("client " + client + " still has pending tasks " + pendingTasks, pendingTasks, Matchers.emptyIterable());
clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get();
assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0));
}
});
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).get());
}
/**
 * Waits till a (pattern) field name mappings concretely exists on all nodes. Note, this waits for the current
 * started shards and checks for concrete mappings.
 */
public void assertConcreteMappingsOnAll(final String index, final String type, final String... fieldNames) throws Exception {
Set<String> nodes = internalCluster().nodesInclude(index);
assertThat(nodes, Matchers.not(Matchers.emptyIterable()));
for (String node : nodes) {
IndicesService indicesService = internalCluster().getInstance(IndicesService.class, node);
IndexService indexService = indicesService.indexService(resolveIndex(index));
assertThat("index service doesn't exists on " + node, indexService, notNullValue());
DocumentMapper documentMapper = indexService.mapperService().documentMapper(type);
assertThat("document mapper doesn't exists on " + node, documentMapper, notNullValue());
for (String fieldName : fieldNames) {
// fieldName may be a pattern; simpleMatchToFullName resolves it to concrete field names
Collection<String> matches = documentMapper.mappers().simpleMatchToFullName(fieldName);
assertThat("field " + fieldName + " doesn't exists on " + node, matches, Matchers.not(emptyIterable()));
}
}
// additionally verify the mapping is visible in the master's cluster state
assertMappingOnMaster(index, type, fieldNames);
}
/**
 * Waits for the given mapping type to exist on the master node and asserts each of the given
 * field names is present under the mapping's "properties".
 */
@SuppressWarnings("unchecked") // mapping source is an untyped Map<String, Object> tree
public void assertMappingOnMaster(final String index, final String type, final String... fieldNames) throws Exception {
GetMappingsResponse response = client().admin().indices().prepareGetMappings(index).setTypes(type).get();
ImmutableOpenMap<String, MappingMetaData> mappings = response.getMappings().get(index);
assertThat(mappings, notNullValue());
MappingMetaData mappingMetaData = mappings.get(type);
assertThat(mappingMetaData, notNullValue());
Map<String, Object> mappingSource = mappingMetaData.getSourceAsMap();
assertFalse(mappingSource.isEmpty());
assertTrue(mappingSource.containsKey("properties"));
// hoisted out of the loop: the properties map is loop-invariant
Map<String, Object> mappingProperties = (Map<String, Object>) mappingSource.get("properties");
for (String fieldName : fieldNames) {
// nested fields are addressed as "a.properties.b" inside the mapping source
final String path = fieldName.indexOf('.') == -1 ? fieldName : fieldName.replace(".", ".properties.");
assertThat("field " + path + " doesn't exists in mapping " + mappingMetaData.source().string(), XContentMapValues.extractValue(path, mappingProperties), notNullValue());
}
}
/** Ensures the result counts are as expected, and logs the results if different */
public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) {
if (searchResponse.getHits().getTotalHits() != expectedResults) {
StringBuilder sb = new StringBuilder("search result contains [");
sb.append(searchResponse.getHits().getTotalHits()).append("] results. expected [").append(expectedResults).append("]");
// capture the short message before appending per-hit details: fail() stays concise
// while the full hit listing goes to the log below
String failMsg = sb.toString();
for (SearchHit hit : searchResponse.getHits().getHits()) {
sb.append("\n-> _index: [").append(hit.getIndex()).append("] type [").append(hit.getType())
.append("] id [").append(hit.getId()).append("]");
}
logger.warn("{}", sb);
fail(failMsg);
}
}
/**
* Restricts the given index to be allocated on <code>n</code> nodes using the allocation deciders.
* Yet if the shards can't be allocated on any other node shards for this index will remain allocated on
* more than <code>n</code> nodes.
*/
public void allowNodes(String index, int n) {
assert index != null;
internalCluster().ensureAtLeastNumDataNodes(n);
Settings.Builder builder = Settings.builder();
if (n > 0) {
getExcludeSettings(index, n, builder);
}
Settings build = builder.build();
if (!build.isEmpty()) {
logger.debug("allowNodes: updating [{}]'s setting to [{}]", index, build.toDelimitedString(';'));
client().admin().indices().prepareUpdateSettings(index).setSettings(build).execute().actionGet();
}
}
/**
* Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
* It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
* are now allocated and started.
*/
public ClusterHealthStatus ensureGreen(String... indices) {
return ensureGreen(TimeValue.timeValueSeconds(30), indices);
}
/**
* Ensures the cluster has a green state via the cluster health API. This method will also wait for relocations.
* It is useful to ensure that all action on the cluster have finished and all shards that were currently relocating
* are now allocated and started.
*
* @param timeout time out value to set on {@link org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest}
*/
public ClusterHealthStatus ensureGreen(TimeValue timeout, String... indices) {
return ensureColor(ClusterHealthStatus.GREEN, timeout, indices);
}
/**
* Ensures the cluster has a yellow state via the cluster health API.
*/
public ClusterHealthStatus ensureYellow(String... indices) {
return ensureColor(ClusterHealthStatus.YELLOW, TimeValue.timeValueSeconds(30), indices);
}
private ClusterHealthStatus ensureColor(ClusterHealthStatus clusterHealthStatus, TimeValue timeout, String... indices) {
String color = clusterHealthStatus.name().toLowerCase(Locale.ROOT);
String method = "ensure" + Strings.capitalize(color);
ClusterHealthRequest healthRequest = Requests.clusterHealthRequest(indices)
.timeout(timeout)
.waitForStatus(clusterHealthStatus)
.waitForEvents(Priority.LANGUID)
.waitForNoRelocatingShards(true)
// We currently often use ensureGreen or ensureYellow to check whether the cluster is back in a good state after shutting down
// a node. If the node that is stopped is the master node, another node will become master and publish a cluster state where it
// is master but where the node that was stopped hasn't been removed yet from the cluster state. It will only subsequently
// publish a second state where the old master is removed. If the ensureGreen/ensureYellow is timed just right, it will get to
// execute before the second cluster state update removes the old master and the condition ensureGreen / ensureYellow will
// trivially hold if it held before the node was shut down. The following "waitForNodes" condition ensures that the node has
// been removed by the master so that the health check applies to the set of nodes we expect to be part of the cluster.
.waitForNodes(Integer.toString(cluster().size()));
ClusterHealthResponse actionGet = client().admin().cluster().health(healthRequest).actionGet();
if (actionGet.isTimedOut()) {
logger.info("{} timed out, cluster state:\n{}\n{}",
method,
client().admin().cluster().prepareState().get().getState(),
client().admin().cluster().preparePendingClusterTasks().get());
fail("timed out waiting for " + color + " state");
}
assertThat("Expected at least " + clusterHealthStatus + " but got " + actionGet.getStatus(),
actionGet.getStatus().value(), lessThanOrEqualTo(clusterHealthStatus.value()));
logger.debug("indices {} are {}", indices.length == 0 ? "[_all]" : indices, color);
return actionGet.getStatus();
}
/**
* Waits for all relocating shards to become active using the cluster health API.
*/
public ClusterHealthStatus waitForRelocation() {
return waitForRelocation(null);
}
/**
* Waits for all relocating shards to become active and the cluster has reached the given health status
* using the cluster health API.
*/
public ClusterHealthStatus waitForRelocation(ClusterHealthStatus status) {
ClusterHealthRequest request = Requests.clusterHealthRequest().waitForNoRelocatingShards(true);
if (status != null) {
request.waitForStatus(status);
}
ClusterHealthResponse actionGet = client().admin().cluster()
.health(request).actionGet();
if (actionGet.isTimedOut()) {
logger.info("waitForRelocation timed out (status={}), cluster state:\n{}\n{}", status,
client().admin().cluster().prepareState().get().getState(), client().admin().cluster().preparePendingClusterTasks().get());
assertThat("timed out waiting for relocation", actionGet.isTimedOut(), equalTo(false));
}
if (status != null) {
assertThat(actionGet.getStatus(), equalTo(status));
}
return actionGet.getStatus();
}
/**
* Waits until at least a give number of document is visible for searchers
*
* @param numDocs number of documents to wait for.
* @return the actual number of docs seen.
*/
public long waitForDocs(final long numDocs) throws InterruptedException {
return waitForDocs(numDocs, null);
}
/**
* Waits until at least a give number of document is visible for searchers
*
* @param numDocs number of documents to wait for
* @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. If supplied it will be first checked for documents indexed.
* This saves on unneeded searches.
* @return the actual number of docs seen.
*/
public long waitForDocs(final long numDocs, @Nullable final BackgroundIndexer indexer) throws InterruptedException {
// indexing threads can wait for up to ~1m before retrying when they first try to index into a shard which is not STARTED.
return waitForDocs(numDocs, 90, TimeUnit.SECONDS, indexer);
}
/**
* Waits until at least a give number of document is visible for searchers
*
* @param numDocs number of documents to wait for
* @param maxWaitTime if not progress have been made during this time, fail the test
* @param maxWaitTimeUnit the unit in which maxWaitTime is specified
* @param indexer a {@link org.elasticsearch.test.BackgroundIndexer}. If supplied it will be first checked for documents indexed.
* This saves on unneeded searches.
* @return the actual number of docs seen.
*/
public long waitForDocs(final long numDocs, int maxWaitTime, TimeUnit maxWaitTimeUnit, @Nullable final BackgroundIndexer indexer)
throws InterruptedException {
final AtomicLong lastKnownCount = new AtomicLong(-1);
long lastStartCount = -1;
BooleanSupplier testDocs = () -> {
if (indexer != null) {
lastKnownCount.set(indexer.totalIndexedDocs());
}
if (lastKnownCount.get() >= numDocs) {
try {
long count = client().prepareSearch().setSize(0).setQuery(matchAllQuery()).execute().actionGet().getHits().getTotalHits();
if (count == lastKnownCount.get()) {
// no progress - try to refresh for the next time
client().admin().indices().prepareRefresh().get();
}
lastKnownCount.set(count);
} catch (Exception e) { // count now acts like search and barfs if all shards failed...
logger.debug("failed to executed count", e);
return false;
}
logger.debug("[{}] docs visible for search. waiting for [{}]", lastKnownCount.get(), numDocs);
} else {
logger.debug("[{}] docs indexed. waiting for [{}]", lastKnownCount.get(), numDocs);
}
return lastKnownCount.get() >= numDocs;
};
while (!awaitBusy(testDocs, maxWaitTime, maxWaitTimeUnit)) {
if (lastStartCount == lastKnownCount.get()) {
// we didn't make any progress
fail("failed to reach " + numDocs + "docs");
}
lastStartCount = lastKnownCount.get();
}
return lastKnownCount.get();
}
/**
* Sets the cluster's minimum master node and make sure the response is acknowledge.
* Note: this doesn't guarantee that the new setting has taken effect, just that it has been received by all nodes.
*/
public void setMinimumMasterNodes(int n) {
assertTrue(client().admin().cluster().prepareUpdateSettings().setTransientSettings(
Settings.builder().put(ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING.getKey(), n))
.get().isAcknowledged());
}
/**
* Prints the current cluster state as debug logging.
*/
public void logClusterState() {
logger.debug("cluster state:\n{}\n{}",
client().admin().cluster().prepareState().get().getState(), client().admin().cluster().preparePendingClusterTasks().get());
}
/**
* Prints the segments info for the given indices as debug logging.
*/
public void logSegmentsState(String... indices) throws Exception {
IndicesSegmentResponse segsRsp = client().admin().indices().prepareSegments(indices).get();
logger.debug("segments {} state: \n{}", indices.length == 0 ? "[_all]" : indices,
segsRsp.toXContent(JsonXContent.contentBuilder().prettyPrint(), ToXContent.EMPTY_PARAMS).string());
}
/**
* Prints current memory stats as info logging.
*/
public void logMemoryStats() {
logger.info("memory: {}", Strings.toString(client().admin().cluster().prepareNodesStats().clear().setJvm(true).get(), true, true));
}
protected void ensureClusterSizeConsistency() {
if (cluster() != null && cluster().size() > 0) { // if static init fails the cluster can be null
logger.trace("Check consistency for [{}] nodes", cluster().size());
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(cluster().size())).get());
}
}
/**
* Verifies that all nodes that have the same version of the cluster state as master have same cluster state
*/
protected void ensureClusterStateConsistency() throws IOException {
if (cluster() != null && cluster().size() > 0) {
final NamedWriteableRegistry namedWriteableRegistry = cluster().getNamedWriteableRegistry();
ClusterState masterClusterState = client().admin().cluster().prepareState().all().get().getState();
byte[] masterClusterStateBytes = ClusterState.Builder.toBytes(masterClusterState);
// remove local node reference
masterClusterState = ClusterState.Builder.fromBytes(masterClusterStateBytes, null, namedWriteableRegistry);
Map<String, Object> masterStateMap = convertToMap(masterClusterState);
int masterClusterStateSize = ClusterState.Builder.toBytes(masterClusterState).length;
String masterId = masterClusterState.nodes().getMasterNodeId();
for (Client client : cluster().getClients()) {
ClusterState localClusterState = client.admin().cluster().prepareState().all().setLocal(true).get().getState();
byte[] localClusterStateBytes = ClusterState.Builder.toBytes(localClusterState);
// remove local node reference
localClusterState = ClusterState.Builder.fromBytes(localClusterStateBytes, null, namedWriteableRegistry);
final Map<String, Object> localStateMap = convertToMap(localClusterState);
final int localClusterStateSize = ClusterState.Builder.toBytes(localClusterState).length;
// Check that the non-master node has the same version of the cluster state as the master and
// that the master node matches the master (otherwise there is no requirement for the cluster state to match)
if (masterClusterState.version() == localClusterState.version() && masterId.equals(localClusterState.nodes().getMasterNodeId())) {
try {
assertEquals("clusterstate UUID does not match", masterClusterState.stateUUID(), localClusterState.stateUUID());
// We cannot compare serialization bytes since serialization order of maps is not guaranteed
// but we can compare serialization sizes - they should be the same
assertEquals("clusterstate size does not match", masterClusterStateSize, localClusterStateSize);
// Compare JSON serialization
assertNull("clusterstate JSON serialization does not match", differenceBetweenMapsIgnoringArrayOrder(masterStateMap, localStateMap));
} catch (AssertionError error) {
logger.error("Cluster state from master:\n{}\nLocal cluster state:\n{}", masterClusterState.toString(), localClusterState.toString());
throw error;
}
}
}
}
}
/**
* Ensures the cluster is in a searchable state for the given indices. This means a searchable copy of each
* shard is available on the cluster.
*/
protected ClusterHealthStatus ensureSearchable(String... indices) {
// this is just a temporary thing but it's easier to change if it is encapsulated.
return ensureGreen(indices);
}
protected void ensureStableCluster(int nodeCount) {
ensureStableCluster(nodeCount, TimeValue.timeValueSeconds(30));
}
protected void ensureStableCluster(int nodeCount, TimeValue timeValue) {
ensureStableCluster(nodeCount, timeValue, false, null);
}
protected void ensureStableCluster(int nodeCount, @Nullable String viaNode) {
ensureStableCluster(nodeCount, TimeValue.timeValueSeconds(30), false, viaNode);
}
protected void ensureStableCluster(int nodeCount, TimeValue timeValue, boolean local, @Nullable String viaNode) {
if (viaNode == null) {
viaNode = randomFrom(internalCluster().getNodeNames());
}
logger.debug("ensuring cluster is stable with [{}] nodes. access node: [{}]. timeout: [{}]", nodeCount, viaNode, timeValue);
ClusterHealthResponse clusterHealthResponse = client(viaNode).admin().cluster().prepareHealth()
.setWaitForEvents(Priority.LANGUID)
.setWaitForNodes(Integer.toString(nodeCount))
.setTimeout(timeValue)
.setLocal(local)
.setWaitForNoRelocatingShards(true)
.get();
if (clusterHealthResponse.isTimedOut()) {
ClusterStateResponse stateResponse = client(viaNode).admin().cluster().prepareState().get();
fail("failed to reach a stable cluster of [" + nodeCount + "] nodes. Tried via [" + viaNode + "]. last cluster state:\n"
+ stateResponse.getState());
}
assertThat(clusterHealthResponse.isTimedOut(), is(false));
ensureFullyConnectedCluster();
}
/**
* Ensures that all nodes in the cluster are connected to each other.
*
* Some network disruptions may leave nodes that are not the master disconnected from each other.
* {@link org.elasticsearch.cluster.NodeConnectionsService} will eventually reconnect but it's
* handy to be able to ensure this happens faster
*/
protected void ensureFullyConnectedCluster() {
NetworkDisruption.ensureFullyConnectedCluster(internalCluster());
}
/**
* Syntactic sugar for:
* <pre>
* client().prepareIndex(index, type).setSource(source).execute().actionGet();
* </pre>
*/
protected final IndexResponse index(String index, String type, XContentBuilder source) {
return client().prepareIndex(index, type).setSource(source).execute().actionGet();
}
/**
* Syntactic sugar for:
* <pre>
* client().prepareIndex(index, type).setSource(source).execute().actionGet();
* </pre>
*/
protected final IndexResponse index(String index, String type, String id, Map<String, Object> source) {
return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
}
/**
* Syntactic sugar for:
* <pre>
* client().prepareGet(index, type, id).execute().actionGet();
* </pre>
*/
protected final GetResponse get(String index, String type, String id) {
return client().prepareGet(index, type, id).execute().actionGet();
}
/**
* Syntactic sugar for:
* <pre>
* return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
* </pre>
*/
protected final IndexResponse index(String index, String type, String id, XContentBuilder source) {
return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
}
/**
* Syntactic sugar for:
* <pre>
* return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
* </pre>
*/
protected final IndexResponse index(String index, String type, String id, Object... source) {
return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
}
/**
* Syntactic sugar for:
* <pre>
* return client().prepareIndex(index, type, id).setSource(source).execute().actionGet();
* </pre>
* <p>
* where source is a JSON String.
*/
protected final IndexResponse index(String index, String type, String id, String source) {
return client().prepareIndex(index, type, id).setSource(source, XContentType.JSON).execute().actionGet();
}
/**
* Waits for relocations and refreshes all indices in the cluster.
*
* @see #waitForRelocation()
*/
protected final RefreshResponse refresh(String... indices) {
waitForRelocation();
// TODO RANDOMIZE with flush?
RefreshResponse actionGet = client().admin().indices().prepareRefresh(indices).execute().actionGet();
assertNoFailures(actionGet);
return actionGet;
}
/**
* Flushes and refreshes all indices in the cluster
*/
protected final void flushAndRefresh(String... indices) {
flush(indices);
refresh(indices);
}
/**
* Flush some or all indices in the cluster.
*/
protected final FlushResponse flush(String... indices) {
waitForRelocation();
FlushResponse actionGet = client().admin().indices().prepareFlush(indices).execute().actionGet();
for (ShardOperationFailedException failure : actionGet.getShardFailures()) {
assertThat("unexpected flush failure " + failure.reason(), failure.status(), equalTo(RestStatus.SERVICE_UNAVAILABLE));
}
return actionGet;
}
/**
* Waits for all relocations and force merge all indices in the cluster to 1 segment.
*/
protected ForceMergeResponse forceMerge() {
waitForRelocation();
ForceMergeResponse actionGet = client().admin().indices().prepareForceMerge().setMaxNumSegments(1).execute().actionGet();
assertNoFailures(actionGet);
return actionGet;
}
/**
* Returns <code>true</code> iff the given index exists otherwise <code>false</code>
*/
protected boolean indexExists(String index) {
IndicesExistsResponse actionGet = client().admin().indices().prepareExists(index).execute().actionGet();
return actionGet.isExists();
}
/**
* Syntactic sugar for enabling allocation for <code>indices</code>
*/
protected final void enableAllocation(String... indices) {
client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put(
EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "all"
)).get();
}
/**
* Syntactic sugar for disabling allocation for <code>indices</code>
*/
protected final void disableAllocation(String... indices) {
client().admin().indices().prepareUpdateSettings(indices).setSettings(Settings.builder().put(
EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), "none"
)).get();
}
/**
* Returns a random admin client. This client can either be a node or a transport client pointing to any of
* the nodes in the cluster.
*/
protected AdminClient admin() {
return client().admin();
}
/**
* Convenience method that forwards to {@link #indexRandom(boolean, List)}.
*/
public void indexRandom(boolean forceRefresh, IndexRequestBuilder... builders) throws InterruptedException, ExecutionException {
indexRandom(forceRefresh, Arrays.asList(builders));
}
public void indexRandom(boolean forceRefresh, boolean dummyDocuments, IndexRequestBuilder... builders) throws InterruptedException, ExecutionException {
indexRandom(forceRefresh, dummyDocuments, Arrays.asList(builders));
}
/**
* Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either
* indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document
* ids or index segment creations. Some features might have bug when a given document is the first or the last in a
* segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index
* layout.
*
* @param forceRefresh if <tt>true</tt> all involved indices are refreshed once the documents are indexed. Additionally if <tt>true</tt>
* some empty dummy documents are may be randomly inserted into the document list and deleted once all documents are indexed.
* This is useful to produce deleted documents on the server side.
* @param builders the documents to index.
* @see #indexRandom(boolean, boolean, java.util.List)
*/
public void indexRandom(boolean forceRefresh, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException {
indexRandom(forceRefresh, forceRefresh, builders);
}
/**
* Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either
* indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document
* ids or index segment creations. Some features might have bug when a given document is the first or the last in a
* segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index
* layout.
*
* @param forceRefresh if <tt>true</tt> all involved indices are refreshed once the documents are indexed.
* @param dummyDocuments if <tt>true</tt> some empty dummy documents may be randomly inserted into the document list and deleted once
* all documents are indexed. This is useful to produce deleted documents on the server side.
* @param builders the documents to index.
*/
public void indexRandom(boolean forceRefresh, boolean dummyDocuments, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException {
indexRandom(forceRefresh, dummyDocuments, true, builders);
}
/**
* Indexes the given {@link IndexRequestBuilder} instances randomly. It shuffles the given builders and either
* indexes them in a blocking or async fashion. This is very useful to catch problems that relate to internal document
* ids or index segment creations. Some features might have bug when a given document is the first or the last in a
* segment or if only one document is in a segment etc. This method prevents issues like this by randomizing the index
* layout.
*
* @param forceRefresh if <tt>true</tt> all involved indices are refreshed once the documents are indexed.
* @param dummyDocuments if <tt>true</tt> some empty dummy documents may be randomly inserted into the document list and deleted once
* all documents are indexed. This is useful to produce deleted documents on the server side.
* @param maybeFlush if <tt>true</tt> this method may randomly execute full flushes after index operations.
* @param builders the documents to index.
*/
public void indexRandom(boolean forceRefresh, boolean dummyDocuments, boolean maybeFlush, List<IndexRequestBuilder> builders) throws InterruptedException, ExecutionException {
Random random = random();
Map<String, Set<String>> indicesAndTypes = new HashMap<>();
for (IndexRequestBuilder builder : builders) {
final Set<String> types = indicesAndTypes.computeIfAbsent(builder.request().index(), index -> new HashSet<>());
types.add(builder.request().type());
}
Set<List<String>> bogusIds = new HashSet<>(); // (index, type, id)
if (random.nextBoolean() && !builders.isEmpty() && dummyDocuments) {
builders = new ArrayList<>(builders);
// inject some bogus docs
final int numBogusDocs = scaledRandomIntBetween(1, builders.size() * 2);
final int unicodeLen = between(1, 10);
for (int i = 0; i < numBogusDocs; i++) {
String id = "bogus_doc_" + randomRealisticUnicodeOfLength(unicodeLen) + Integer.toString(dummmyDocIdGenerator.incrementAndGet());
Map.Entry<String, Set<String>> indexAndTypes = RandomPicks.randomFrom(random, indicesAndTypes.entrySet());
String index = indexAndTypes.getKey();
String type = RandomPicks.randomFrom(random, indexAndTypes.getValue());
bogusIds.add(Arrays.asList(index, type, id));
// We configure a routing key in case the mapping requires it
builders.add(client().prepareIndex(index, type, id).setSource("{}", XContentType.JSON).setRouting(id));
}
}
Collections.shuffle(builders, random());
final CopyOnWriteArrayList<Tuple<IndexRequestBuilder, Exception>> errors = new CopyOnWriteArrayList<>();
List<CountDownLatch> inFlightAsyncOperations = new ArrayList<>();
// If you are indexing just a few documents then frequently do it one at a time. If many then frequently in bulk.
final String[] indices = indicesAndTypes.keySet().toArray(new String[0]);
if (builders.size() < FREQUENT_BULK_THRESHOLD ? frequently() : builders.size() < ALWAYS_BULK_THRESHOLD ? rarely() : false) {
if (frequently()) {
logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), true, false);
for (IndexRequestBuilder indexRequestBuilder : builders) {
indexRequestBuilder.execute(new PayloadLatchedActionListener<IndexResponse, IndexRequestBuilder>(indexRequestBuilder, newLatch(inFlightAsyncOperations), errors));
postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush);
}
} else {
logger.info("Index [{}] docs async: [{}] bulk: [{}]", builders.size(), false, false);
for (IndexRequestBuilder indexRequestBuilder : builders) {
indexRequestBuilder.execute().actionGet();
postIndexAsyncActions(indices, inFlightAsyncOperations, maybeFlush);
}
}
} else {
List<List<IndexRequestBuilder>> partition = eagerPartition(builders, Math.min(MAX_BULK_INDEX_REQUEST_SIZE,
Math.max(1, (int) (builders.size() * randomDouble()))));
logger.info("Index [{}] docs async: [{}] bulk: [{}] partitions [{}]", builders.size(), false, true, partition.size());
for (List<IndexRequestBuilder> segmented : partition) {
BulkRequestBuilder bulkBuilder = client().prepareBulk();
for (IndexRequestBuilder indexRequestBuilder : segmented) {
bulkBuilder.add(indexRequestBuilder);
}
BulkResponse actionGet = bulkBuilder.execute().actionGet();
assertThat(actionGet.hasFailures() ? actionGet.buildFailureMessage() : "", actionGet.hasFailures(), equalTo(false));
}
}
for (CountDownLatch operation : inFlightAsyncOperations) {
operation.await();
}
final List<Exception> actualErrors = new ArrayList<>();
for (Tuple<IndexRequestBuilder, Exception> tuple : errors) {
if (ExceptionsHelper.unwrapCause(tuple.v2()) instanceof EsRejectedExecutionException) {
tuple.v1().execute().actionGet(); // re-index if rejected
} else {
actualErrors.add(tuple.v2());
}
}
assertThat(actualErrors, emptyIterable());
if (!bogusIds.isEmpty()) {
// delete the bogus types again - it might trigger merges or at least holes in the segments and enforces deleted docs!
for (List<String> doc : bogusIds) {
assertEquals("failed to delete a dummy doc [" + doc.get(0) + "][" + doc.get(2) + "]",
DocWriteResponse.Result.DELETED,
client().prepareDelete(doc.get(0), doc.get(1), doc.get(2)).setRouting(doc.get(2)).get().getResult());
}
}
if (forceRefresh) {
assertNoFailures(client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute().get());
}
}
private AtomicInteger dummmyDocIdGenerator = new AtomicInteger();
/** Disables an index block for the specified index */
public static void disableIndexBlock(String index, String block) {
Settings settings = Settings.builder().put(block, false).build();
client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
}
/** Enables an index block for the specified index */
public static void enableIndexBlock(String index, String block) {
Settings settings = Settings.builder().put(block, true).build();
client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
}
/** Sets or unsets the cluster read_only mode **/
public static void setClusterReadOnly(boolean value) {
Settings settings = value ? Settings.builder().put(MetaData.SETTING_READ_ONLY_SETTING.getKey(), value).build() :
Settings.builder().putNull(MetaData.SETTING_READ_ONLY_SETTING.getKey()).build() ;
assertAcked(client().admin().cluster().prepareUpdateSettings().setTransientSettings(settings).get());
}
private static CountDownLatch newLatch(List<CountDownLatch> latches) {
CountDownLatch l = new CountDownLatch(1);
latches.add(l);
return l;
}
/**
* Maybe refresh, force merge, or flush then always make sure there aren't too many in flight async operations.
*/
private void postIndexAsyncActions(String[] indices, List<CountDownLatch> inFlightAsyncOperations, boolean maybeFlush) throws InterruptedException {
if (rarely()) {
if (rarely()) {
client().admin().indices().prepareRefresh(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute(
new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
} else if (maybeFlush && rarely()) {
if (randomBoolean()) {
client().admin().indices().prepareFlush(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).execute(
new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
} else {
client().admin().indices().syncedFlush(syncedFlushRequest(indices).indicesOptions(IndicesOptions.lenientExpandOpen()),
new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
}
} else if (rarely()) {
client().admin().indices().prepareForceMerge(indices).setIndicesOptions(IndicesOptions.lenientExpandOpen()).setMaxNumSegments(between(1, 10)).setFlush(maybeFlush && randomBoolean()).execute(
new LatchedActionListener<>(newLatch(inFlightAsyncOperations)));
}
}
while (inFlightAsyncOperations.size() > MAX_IN_FLIGHT_ASYNC_INDEXES) {
int waitFor = between(0, inFlightAsyncOperations.size() - 1);
inFlightAsyncOperations.remove(waitFor).await();
}
}
/**
* The scope of a test cluster used together with
* {@link ESIntegTestCase.ClusterScope} annotations on {@link ESIntegTestCase} subclasses.
*/
public enum Scope {
/**
* A cluster shared across all method in a single test suite
*/
SUITE,
/**
* A test exclusive test cluster
*/
TEST
}
/**
* Defines a cluster scope for a {@link ESIntegTestCase} subclass.
* By default if no {@link ClusterScope} annotation is present {@link ESIntegTestCase.Scope#SUITE} is used
* together with randomly chosen settings like number of nodes etc.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface ClusterScope {
/**
* Returns the scope. {@link ESIntegTestCase.Scope#SUITE} is default.
*/
Scope scope() default Scope.SUITE;
/**
* Returns the number of nodes in the cluster. Default is <tt>-1</tt> which means
* a random number of nodes is used, where the minimum and maximum number of nodes
* are either the specified ones or the default ones if not specified.
*/
int numDataNodes() default -1;
/**
* Returns the minimum number of data nodes in the cluster. Default is <tt>-1</tt>.
* Ignored when {@link ClusterScope#numDataNodes()} is set.
*/
int minNumDataNodes() default -1;
/**
* Returns the maximum number of data nodes in the cluster. Default is <tt>-1</tt>.
* Ignored when {@link ClusterScope#numDataNodes()} is set.
*/
int maxNumDataNodes() default -1;
/**
* Indicates whether the cluster can have dedicated master nodes. If <tt>false</tt> means data nodes will serve as master nodes
* and there will be no dedicated master (and data) nodes. Default is <tt>true</tt> which means
* dedicated master nodes will be randomly used.
*/
boolean supportsDedicatedMasters() default true;
/**
* The cluster automatically manages the {@link ElectMasterService#DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING} by default
* as nodes are started and stopped. Set this to false to manage the setting manually.
*/
boolean autoMinMasterNodes() default true;
/**
* Returns the number of client nodes in the cluster. Default is {@link InternalTestCluster#DEFAULT_NUM_CLIENT_NODES}, a
* negative value means that the number of client nodes will be randomized.
*/
int numClientNodes() default InternalTestCluster.DEFAULT_NUM_CLIENT_NODES;
/**
* Returns the transport client ratio. By default this returns <code>-1</code> which means a random
* ratio in the interval <code>[0..1]</code> is used.
*/
double transportClientRatio() default -1;
}
private class LatchedActionListener<Response> implements ActionListener<Response> {
private final CountDownLatch latch;
LatchedActionListener(CountDownLatch latch) {
this.latch = latch;
}
@Override
public final void onResponse(Response response) {
latch.countDown();
}
@Override
public final void onFailure(Exception t) {
try {
logger.info("Action Failed", t);
addError(t);
} finally {
latch.countDown();
}
}
protected void addError(Exception e) {
}
}
    /**
     * A {@link LatchedActionListener} that additionally records each failure together with the
     * request builder that produced it, so callers can later report which request failed.
     */
    private class PayloadLatchedActionListener<Response, T> extends LatchedActionListener<Response> {
        // thread-safe sink shared across concurrent requests; collects (builder, failure) pairs
        private final CopyOnWriteArrayList<Tuple<T, Exception>> errors;
        private final T builder;
        PayloadLatchedActionListener(T builder, CountDownLatch latch, CopyOnWriteArrayList<Tuple<T, Exception>> errors) {
            super(latch);
            this.errors = errors;
            this.builder = builder;
        }
        @Override
        protected void addError(Exception e) {
            errors.add(new Tuple<>(builder, e));
        }
    }
    /**
     * Clears the given scroll Ids and asserts that the clear request succeeded.
     *
     * @param scrollIds the scroll ids to release on the cluster
     */
    public void clearScroll(String... scrollIds) {
        ClearScrollResponse clearResponse = client().prepareClearScroll()
            .setScrollIds(Arrays.asList(scrollIds)).get();
        assertThat(clearResponse.isSucceeded(), equalTo(true));
    }
private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) {
if (clazz == Object.class || clazz == ESIntegTestCase.class) {
return null;
}
A annotation = clazz.getAnnotation(annotationClass);
if (annotation != null) {
return annotation;
}
return getAnnotation(clazz.getSuperclass(), annotationClass);
}
    /** Resolves the cluster {@link Scope} for this test class (defaults to {@code Scope.SUITE}). */
    private Scope getCurrentClusterScope() {
        return getCurrentClusterScope(this.getClass());
    }
    private static Scope getCurrentClusterScope(Class<?> clazz) {
        ClusterScope annotation = getAnnotation(clazz, ClusterScope.class);
        // if we are not annotated assume suite!
        return annotation == null ? Scope.SUITE : annotation.scope();
    }
    /** Whether dedicated master nodes are allowed; {@code true} when no {@link ClusterScope} annotation is present. */
    private boolean getSupportsDedicatedMasters() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null ? true : annotation.supportsDedicatedMasters();
    }
    /** Whether minimum master nodes is managed automatically; {@code true} when no annotation is present. */
    private boolean getAutoMinMasterNodes() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null ? true : annotation.autoMinMasterNodes();
    }
    /** Fixed number of data nodes, or {@code -1} (randomized) when no annotation is present. */
    private int getNumDataNodes() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null ? -1 : annotation.numDataNodes();
    }
    /** Minimum number of data nodes; falls back to {@code InternalTestCluster.DEFAULT_MIN_NUM_DATA_NODES}. */
    private int getMinNumDataNodes() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null || annotation.minNumDataNodes() == -1 ? InternalTestCluster.DEFAULT_MIN_NUM_DATA_NODES : annotation.minNumDataNodes();
    }
    /** Maximum number of data nodes; falls back to {@code InternalTestCluster.DEFAULT_MAX_NUM_DATA_NODES}. */
    private int getMaxNumDataNodes() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null || annotation.maxNumDataNodes() == -1 ? InternalTestCluster.DEFAULT_MAX_NUM_DATA_NODES : annotation.maxNumDataNodes();
    }
    /** Number of client nodes; falls back to {@code InternalTestCluster.DEFAULT_NUM_CLIENT_NODES}. */
    private int getNumClientNodes() {
        ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        return annotation == null ? InternalTestCluster.DEFAULT_NUM_CLIENT_NODES : annotation.numClientNodes();
    }
    /**
     * This method is used to obtain settings for the {@code Nth} node in the cluster.
     * Nodes in this cluster are associated with an ordinal number such that nodes can
     * be started with specific configurations. This method might be called multiple
     * times with the same ordinal and is expected to return the same value for each invocation.
     * In other words subclasses must ensure this method is idempotent.
     */
    protected Settings nodeSettings(int nodeOrdinal) {
        Settings.Builder builder = Settings.builder()
            .put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), Integer.MAX_VALUE)
            // Default the watermarks to absurdly low to prevent the tests
            // from failing on nodes without enough disk space
            .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
            .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
            .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
            // generous script-compilation budget so randomized tests don't trip the rate limiter
            .put(ScriptService.SCRIPT_MAX_COMPILATIONS_RATE.getKey(), "2048/1m")
            // by default we never cache below 10k docs in a segment,
            // bypass this limit so that caching gets some testing in
            // integration tests that usually create few documents
            .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), nodeOrdinal % 2 == 0)
            // wait short time for other active shards before actually deleting, default 30s not needed in tests
            .put(IndicesStore.INDICES_STORE_DELETE_SHARD_TIMEOUT.getKey(), new TimeValue(1, TimeUnit.SECONDS))
            // randomly enable low-level search cancellation to make sure it does not alter results
            .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean());
        if (rarely()) {
            // Sometimes adjust the minimum search thread pool size, causing
            // QueueResizingEsThreadPoolExecutor to be used instead of a regular
            // fixed thread pool
            builder.put("thread_pool.search.min_queue_size", 100);
        }
        return builder.build();
    }
    /**
     * Returns the config path for the {@code Nth} node, or {@code null} if the node should use
     * the default configuration location.
     */
    protected Path nodeConfigPath(int nodeOrdinal) {
        return null;
    }
    /**
     * Returns a collection of plugins that should be loaded on each node.
     */
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Collections.emptyList();
    }
    /**
     * Returns a collection of plugins that should be loaded when creating a transport client.
     */
    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
        return Collections.emptyList();
    }
    /**
     * This method is used to obtain additional settings for clients created by the internal cluster.
     * These settings will be applied on the client in addition to some randomized settings defined in
     * the cluster. These settings will also override any other settings the internal cluster might
     * add by default.
     */
    protected Settings transportClientSettings() {
        return Settings.EMPTY;
    }
private ExternalTestCluster buildExternalCluster(String clusterAddresses) throws IOException {
String[] stringAddresses = clusterAddresses.split(",");
TransportAddress[] transportAddresses = new TransportAddress[stringAddresses.length];
int i = 0;
for (String stringAddress : stringAddresses) {
URL url = new URL("http://" + stringAddress);
InetAddress inetAddress = InetAddress.getByName(url.getHost());
transportAddresses[i++] = new TransportAddress(new InetSocketAddress(inetAddress, url.getPort()));
}
return new ExternalTestCluster(createTempDir(), externalClusterClientSettings(), transportClientPlugins(), transportAddresses);
}
    /** Settings applied to the client used to talk to an external cluster; empty by default. */
    protected Settings externalClusterClientSettings() {
        return Settings.EMPTY;
    }
    /** If {@code true}, the external-cluster system property is ignored and an internal cluster is used. */
    protected boolean ignoreExternalCluster() {
        return false;
    }
    /**
     * Builds the {@link TestCluster} for this run. If the external-cluster system property is set
     * (and not ignored) an external cluster is used; SUITE/TEST scopes otherwise get an
     * {@link InternalTestCluster} configured from the {@link ClusterScope} annotation values.
     */
    protected TestCluster buildTestCluster(Scope scope, long seed) throws IOException {
        String clusterAddresses = System.getProperty(TESTS_CLUSTER);
        if (Strings.hasLength(clusterAddresses) && ignoreExternalCluster() == false) {
            // an external cluster cannot be recreated per test, so TEST scope is incompatible with it
            if (scope == Scope.TEST) {
                throw new IllegalArgumentException("Cannot run TEST scope test with " + TESTS_CLUSTER);
            }
            return buildExternalCluster(clusterAddresses);
        }
        final String nodePrefix;
        switch (scope) {
            case TEST:
                nodePrefix = TEST_CLUSTER_NODE_PREFIX;
                break;
            case SUITE:
                nodePrefix = SUITE_CLUSTER_NODE_PREFIX;
                break;
            default:
                throw new ElasticsearchException("Scope not supported: " + scope);
        }
        boolean supportsDedicatedMasters = getSupportsDedicatedMasters();
        int numDataNodes = getNumDataNodes();
        int minNumDataNodes;
        int maxNumDataNodes;
        // a fixed numDataNodes pins both bounds; otherwise use the (possibly defaulted) min/max
        if (numDataNodes >= 0) {
            minNumDataNodes = maxNumDataNodes = numDataNodes;
        } else {
            minNumDataNodes = getMinNumDataNodes();
            maxNumDataNodes = getMaxNumDataNodes();
        }
        Collection<Class<? extends Plugin>> mockPlugins = getMockPlugins();
        final NodeConfigurationSource nodeConfigurationSource = getNodeConfigSource();
        if (addMockTransportService()) {
            ArrayList<Class<? extends Plugin>> mocks = new ArrayList<>(mockPlugins);
            // add both mock plugins - local and tcp if they are not there
            // we do this in case somebody overrides getMockPlugins and misses to call super
            if (mockPlugins.contains(getTestTransportPlugin()) == false) {
                mocks.add(getTestTransportPlugin());
            }
            mockPlugins = mocks;
        }
        return new InternalTestCluster(seed, createTempDir(), supportsDedicatedMasters, getAutoMinMasterNodes(),
            minNumDataNodes, maxNumDataNodes,
            InternalTestCluster.clusterName(scope.name(), seed) + "-cluster", nodeConfigurationSource, getNumClientNodes(),
            InternalTestCluster.DEFAULT_ENABLE_HTTP_PIPELINING, nodePrefix, mockPlugins, getClientWrapper());
    }
    /**
     * Creates the {@link NodeConfigurationSource} backing the internal test cluster. Node and
     * transport-client configuration delegates to the test-class overrides ({@link #nodeSettings(int)},
     * {@link #transportClientSettings()}, ...) layered on top of the shared network settings.
     */
    protected NodeConfigurationSource getNodeConfigSource() {
        Settings.Builder networkSettings = Settings.builder();
        if (addMockTransportService()) {
            networkSettings.put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType());
        }
        NodeConfigurationSource nodeConfigurationSource = new NodeConfigurationSource() {
            @Override
            public Settings nodeSettings(int nodeOrdinal) {
                // HTTP is disabled by default; subclass settings may still re-enable it
                return Settings.builder()
                    .put(NetworkModule.HTTP_ENABLED.getKey(), false)
                    .put(networkSettings.build())
                    .put(ESIntegTestCase.this.nodeSettings(nodeOrdinal)).build();
            }
            @Override
            public Path nodeConfigPath(int nodeOrdinal) {
                return ESIntegTestCase.this.nodeConfigPath(nodeOrdinal);
            }
            @Override
            public Collection<Class<? extends Plugin>> nodePlugins() {
                return ESIntegTestCase.this.nodePlugins();
            }
            @Override
            public Settings transportClientSettings() {
                return Settings.builder().put(networkSettings.build())
                    .put(ESIntegTestCase.this.transportClientSettings()).build();
            }
            @Override
            public Collection<Class<? extends Plugin>> transportClientPlugins() {
                Collection<Class<? extends Plugin>> plugins = ESIntegTestCase.this.transportClientPlugins();
                // make sure the test transport plugin is always available to the client
                if (plugins.contains(getTestTransportPlugin()) == false) {
                    plugins = new ArrayList<>(plugins);
                    plugins.add(getTestTransportPlugin());
                }
                return Collections.unmodifiableCollection(plugins);
            }
        };
        return nodeConfigurationSource;
    }
    /**
     * Iff this returns true mock transport implementations are used for the test runs. Otherwise no mock transport impls are used.
     * The default is {@code true}.
     */
    protected boolean addMockTransportService() {
        return true;
    }
    /**
     * Iff this returns true the test zen discovery implementation is used for the test runs.
     * The default is {@code true}.
     */
    protected boolean addTestZenDiscovery() {
        return true;
    }
    /**
     * Returns a function that allows to wrap / filter all clients that are exposed by the test cluster. This is useful
     * for debugging or request / response pre and post processing. It also allows to intercept all calls done by the test
     * framework. By default this method returns an identity function {@link Function#identity()}.
     */
    protected Function<Client,Client> getClientWrapper() {
        return Function.identity();
    }
    /**
     * Return the mock plugins the cluster should use. A random subset of the optional mock plugins
     * is chosen per run; the transport, discovery and seed plugins are added unconditionally
     * (subject to the corresponding {@code add*} hooks).
     */
    protected Collection<Class<? extends Plugin>> getMockPlugins() {
        final ArrayList<Class<? extends Plugin>> mocks = new ArrayList<>();
        if (MOCK_MODULES_ENABLED && randomBoolean()) { // sometimes run without those completely
            if (randomBoolean() && addMockTransportService()) {
                mocks.add(MockTransportService.TestPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(MockFSIndexStore.TestPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(NodeMocksPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(MockEngineFactoryPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(MockSearchService.TestPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(AssertingTransportInterceptor.TestPlugin.class);
            }
            if (randomBoolean()) {
                mocks.add(MockFieldFilterPlugin.class);
            }
        }
        // the plugins below are not randomized; they are required for the test infrastructure itself
        if (addMockTransportService()) {
            mocks.add(getTestTransportPlugin());
        }
        if (addTestZenDiscovery()) {
            mocks.add(TestZenDiscovery.TestPlugin.class);
        }
        mocks.add(TestSeedPlugin.class);
        return Collections.unmodifiableList(mocks);
    }
    /** Plugin whose only job is to register {@code INDEX_TEST_SEED_SETTING} as a recognized index setting. */
    public static final class TestSeedPlugin extends Plugin {
        @Override
        public List<Setting<?>> getSettings() {
            return Arrays.asList(INDEX_TEST_SEED_SETTING);
        }
    }
    /**
     * Returns the client ratio configured via the {@code TESTS_CLIENT_RATIO} system property,
     * or {@link Double#NaN} if the property is unset or empty.
     */
    private static double transportClientRatio() {
        String property = System.getProperty(TESTS_CLIENT_RATIO);
        if (property == null || property.isEmpty()) {
            return Double.NaN;
        }
        return Double.parseDouble(property);
    }
    /**
     * Returns the transport client ratio from the class level annotation or via
     * {@link System#getProperty(String)} if available. If both are not available this will
     * return a random ratio in the interval {@code [0..1]}.
     */
    protected double getPerTestTransportClientRatio() {
        final ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class);
        double perTestRatio = -1;
        if (annotation != null) {
            perTestRatio = annotation.transportClientRatio();
        }
        if (perTestRatio == -1) {
            // -1 means "not configured on the annotation": fall back to the system property or randomize
            return Double.isNaN(TRANSPORT_CLIENT_RATIO) ? randomDouble() : TRANSPORT_CLIENT_RATIO;
        }
        assert perTestRatio >= 0.0 && perTestRatio <= 1.0;
        return perTestRatio;
    }
    /**
     * Returns path to a random directory that can be used to create a temporary file system repo
     */
    public Path randomRepoPath() {
        if (currentCluster instanceof InternalTestCluster) {
            return randomRepoPath(((InternalTestCluster) currentCluster).getDefaultSettings());
        }
        throw new UnsupportedOperationException("unsupported cluster type");
    }
    /**
     * Returns path to a random directory that can be used to create a temporary file system repo
     *
     * @param settings used to resolve the configured repository root directories
     */
    public static Path randomRepoPath(Settings settings) {
        Environment environment = TestEnvironment.newEnvironment(settings);
        Path[] repoFiles = environment.repoFiles();
        assert repoFiles.length > 0;
        Path path;
        // keep drawing random names until one is found that does not exist yet
        do {
            path = repoFiles[0].resolve(randomAlphaOfLength(10));
        } while (Files.exists(path));
        return path;
    }
protected NumShards getNumShards(String index) {
MetaData metaData = client().admin().cluster().prepareState().get().getState().metaData();
assertThat(metaData.hasIndex(index), equalTo(true));
int numShards = Integer.valueOf(metaData.index(index).getSettings().get(SETTING_NUMBER_OF_SHARDS));
int numReplicas = Integer.valueOf(metaData.index(index).getSettings().get(SETTING_NUMBER_OF_REPLICAS));
return new NumShards(numShards, numReplicas);
}
    /**
     * Asserts that all shards of the given index are allocated on nodes matching the given node pattern.
     *
     * @return the names of the nodes that currently hold a shard of {@code index}
     */
    public Set<String> assertAllShardsOnNodes(String index, String... pattern) {
        Set<String> nodes = new HashSet<>();
        ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState();
        for (IndexRoutingTable indexRoutingTable : clusterState.routingTable()) {
            for (IndexShardRoutingTable indexShardRoutingTable : indexRoutingTable) {
                for (ShardRouting shardRouting : indexShardRoutingTable) {
                    // only consider assigned shards that belong to the index under test
                    if (shardRouting.currentNodeId() != null && index.equals(shardRouting.getIndexName())) {
                        String name = clusterState.nodes().get(shardRouting.currentNodeId()).getName();
                        nodes.add(name);
                        assertThat("Allocated on new node: " + name, Regex.simpleMatch(pattern, name), is(true));
                    }
                }
            }
        }
        return nodes;
    }
    /**
     * Asserts that all segments of the given index are sorted with the provided {@link Sort}.
     */
    public void assertSortedSegments(String indexName, Sort expectedIndexSort) {
        IndicesSegmentResponse segmentResponse =
            client().admin().indices().prepareSegments(indexName).execute().actionGet();
        IndexSegments indexSegments = segmentResponse.getIndices().get(indexName);
        // check every segment of every shard copy of the index
        for (IndexShardSegments indexShardSegments : indexSegments.getShards().values()) {
            for (ShardSegments shardSegments : indexShardSegments.getShards()) {
                for (Segment segment : shardSegments) {
                    assertThat(expectedIndexSort, equalTo(segment.getSegmentSort()));
                }
            }
        }
    }
    /** Value object describing the shard layout of an index. */
    protected static class NumShards {
        public final int numPrimaries;
        public final int numReplicas;
        // total shard count including replicas: numPrimaries * (numReplicas + 1)
        public final int totalNumShards;
        // copies of each shard's data: the primary plus its replicas
        public final int dataCopies;
        private NumShards(int numPrimaries, int numReplicas) {
            this.numPrimaries = numPrimaries;
            this.numReplicas = numReplicas;
            this.dataCopies = numReplicas + 1;
            this.totalNumShards = numPrimaries * dataCopies;
        }
    }
    // a non-null suite-scoped INSTANCE means the lifecycle is handled at class level, not per test
    private static boolean runTestScopeLifecycle() {
        return INSTANCE == null;
    }
    /** Sets up the test cluster before each test unless the suite-scope lifecycle is active. */
    @Before
    public final void setupTestCluster() throws Exception {
        if (runTestScopeLifecycle()) {
            printTestMessage("setting up");
            beforeInternal();
            printTestMessage("all set up");
        }
    }
    /** Cleans up the cluster after each test unless the suite-scope lifecycle is active. */
    @After
    public final void cleanUpCluster() throws Exception {
        // Deleting indices is going to clear search contexts implicitly so we
        // need to check that there are no more in-flight search contexts before
        // we remove indices
        super.ensureAllSearchContextsReleased();
        if (runTestScopeLifecycle()) {
            printTestMessage("cleaning up after");
            afterInternal(false);
            printTestMessage("cleaned up after");
        }
    }
    /** Tears down a suite-scoped cluster (if any) after the last test of the class has run. */
    @AfterClass
    public static void afterClass() throws Exception {
        if (!runTestScopeLifecycle()) {
            try {
                INSTANCE.printTestMessage("cleaning up after");
                INSTANCE.afterInternal(true);
                checkStaticState(true);
            } finally {
                // always release the suite instance so the next class starts clean
                INSTANCE = null;
            }
        } else {
            clearClusters();
        }
        SUITE_SEED = null;
        currentCluster = null;
    }
    /** Instantiates and initializes the suite-scoped test instance when the class carries {@link SuiteScopeTestCase}. */
    private static void initializeSuiteScope() throws Exception {
        Class<?> targetClass = getTestClass();
        /*
         * Note we create these test class instance via reflection
         * since JUnit creates a new instance per test and that is also
         * the reason why INSTANCE is static since this entire method
         * must be executed in a static context.
         */
        assert INSTANCE == null;
        if (isSuiteScopedTest(targetClass)) {
            // note we need to do this this way to make sure this is reproducible
            INSTANCE = (ESIntegTestCase) targetClass.getConstructor().newInstance();
            boolean success = false;
            try {
                INSTANCE.printTestMessage("setup");
                INSTANCE.beforeInternal();
                INSTANCE.setupSuiteScopeCluster();
                success = true;
            } finally {
                // tear everything down again if any part of the setup failed
                if (!success) {
                    afterClass();
                }
            }
        } else {
            INSTANCE = null;
        }
    }
    /**
     * Compute a routing key that will route documents to the <code>shard</code>-th shard
     * of the provided index.
     */
    protected String routingKeyForShard(String index, int shard) {
        return internalCluster().routingKeyForShard(resolveIndex(index), shard, random());
    }
    /** Returns the registry of the internal cluster when available, otherwise the standard set. */
    @Override
    protected NamedXContentRegistry xContentRegistry() {
        if (isInternalCluster() && cluster().size() > 0) {
            // If it's internal cluster - using existing registry in case plugin registered custom data
            return internalCluster().getInstance(NamedXContentRegistry.class);
        } else {
            // If it's external cluster - fall back to the standard set
            return new NamedXContentRegistry(ClusterModule.getNamedXWriteables());
        }
    }
    /**
     * Returns an instance of {@link RestClient} pointing to the current test cluster.
     * Creates a new client if the method is invoked for the first time in the context of the current test scope.
     * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise
     * it cannot be reused by other tests anymore.
     */
    protected static synchronized RestClient getRestClient() {
        if (restClient == null) {
            restClient = createRestClient(null);
        }
        return restClient;
    }
    /** Creates a plain-HTTP {@link RestClient} for the current cluster. */
    protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback) {
        return createRestClient(httpClientConfigCallback, "http");
    }
    /** Creates a {@link RestClient} for all nodes of the cluster, asserting the nodes-info request succeeded. */
    protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) {
        NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get();
        assertFalse(nodesInfoResponse.hasFailures());
        return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol);
    }
protected static RestClient createRestClient(final List<NodeInfo> nodes,
RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) {
List<HttpHost> hosts = new ArrayList<>();
for (NodeInfo node : nodes) {
if (node.getHttp() != null) {
TransportAddress publishAddress = node.getHttp().address().publishAddress();
InetSocketAddress address = publishAddress.address();
hosts.add(new HttpHost(NetworkAddress.format(address.getAddress()), address.getPort(), protocol));
}
}
RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[hosts.size()]));
if (httpClientConfigCallback != null) {
builder.setHttpClientConfigCallback(httpClientConfigCallback);
}
return builder.build();
}
    /**
     * This method is executed iff the test is annotated with {@link SuiteScopeTestCase}
     * before the first test of this class is executed.
     *
     * @see SuiteScopeTestCase
     */
    protected void setupSuiteScopeCluster() throws Exception {
    }
    /** Whether the given class itself (not a superclass) carries {@link SuiteScopeTestCase}. */
    private static boolean isSuiteScopedTest(Class<?> clazz) {
        return clazz.getAnnotation(SuiteScopeTestCase.class) != null;
    }
    /**
     * If a test is annotated with {@link SuiteScopeTestCase}
     * the checks and modifications that are applied to the used test cluster are only done after all tests
     * of this class are executed. This also has the side-effect of a suite level setup method {@link #setupSuiteScopeCluster()}
     * that is executed in a separate test instance. Variables that need to be accessible across test instances must be static.
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Inherited
    @Target(ElementType.TYPE)
    public @interface SuiteScopeTestCase {
    }
    /** Resolves an index name to an {@link Index} (name plus uuid), asserting that the index exists. */
    public static Index resolveIndex(String index) {
        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().setIndices(index).get();
        assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index));
        String uuid = getIndexResponse.getSettings().get(index).get(IndexMetaData.SETTING_INDEX_UUID);
        return new Index(index, uuid);
    }
    /**
     * Asserts (retrying via {@code assertBusy}) that for every active primary shard the local
     * checkpoint, global checkpoint and max sequence number agree across all of its copies, and
     * that the primary's in-sync global-checkpoint knowledge matches each copy's reported value.
     */
    protected void assertSeqNos() throws Exception {
        assertBusy(() -> {
            IndicesStatsResponse stats = client().admin().indices().prepareStats().clear().get();
            for (IndexStats indexStats : stats.getIndices().values()) {
                for (IndexShardStats indexShardStats : indexStats.getIndexShards().values()) {
                    Optional<ShardStats> maybePrimary = Stream.of(indexShardStats.getShards())
                        .filter(s -> s.getShardRouting().active() && s.getShardRouting().primary())
                        .findFirst();
                    if (maybePrimary.isPresent() == false) {
                        // no active primary (e.g. still initializing): nothing to compare against
                        continue;
                    }
                    ShardStats primary = maybePrimary.get();
                    final SeqNoStats primarySeqNoStats = primary.getSeqNoStats();
                    final ShardRouting primaryShardRouting = primary.getShardRouting();
                    assertThat(primaryShardRouting + " should have set the global checkpoint",
                        primarySeqNoStats.getGlobalCheckpoint(), not(equalTo(SequenceNumbers.UNASSIGNED_SEQ_NO)));
                    final DiscoveryNode node = clusterService().state().nodes().get(primaryShardRouting.currentNodeId());
                    final IndicesService indicesService =
                        internalCluster().getInstance(IndicesService.class, node.getName());
                    final IndexShard indexShard = indicesService.getShardOrNull(primaryShardRouting.shardId());
                    final ObjectLongMap<String> globalCheckpoints = indexShard.getInSyncGlobalCheckpoints();
                    for (ShardStats shardStats : indexShardStats) {
                        final SeqNoStats seqNoStats = shardStats.getSeqNoStats();
                        assertThat(shardStats.getShardRouting() + " local checkpoint mismatch",
                            seqNoStats.getLocalCheckpoint(), equalTo(primarySeqNoStats.getLocalCheckpoint()));
                        assertThat(shardStats.getShardRouting() + " global checkpoint mismatch",
                            seqNoStats.getGlobalCheckpoint(), equalTo(primarySeqNoStats.getGlobalCheckpoint()));
                        assertThat(shardStats.getShardRouting() + " max seq no mismatch",
                            seqNoStats.getMaxSeqNo(), equalTo(primarySeqNoStats.getMaxSeqNo()));
                        // the local knowledge on the primary of the global checkpoint equals the global checkpoint on the shard
                        assertThat(
                            seqNoStats.getGlobalCheckpoint(),
                            equalTo(globalCheckpoints.get(shardStats.getShardRouting().allocationId().getId())));
                    }
                }
            }
        });
    }
}
| apache-2.0 |
pennello/jdbi | core/src/main/java/org/jdbi/v3/core/mapper/InferredRowMapperFactory.java | 1656 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jdbi.v3.core.mapper;
import static org.jdbi.v3.core.generic.GenericTypes.findGenericParameter;
import java.lang.reflect.Type;
import java.util.Optional;
import org.jdbi.v3.core.config.ConfigRegistry;
/**
 * A generic {@link RowMapperFactory} that reflectively inspects a
 * {@code RowMapper<T>} and maps only rows of type {@code T}.
 * The type parameter T must be accessible via reflection, or an
 * {@link UnsupportedOperationException} is thrown at construction time.
 */
class InferredRowMapperFactory implements RowMapperFactory
{
    private final Type mappedType;
    private final RowMapper<?> delegate;

    InferredRowMapperFactory(RowMapper<?> mapper)
    {
        this.delegate = mapper;
        // resolve the concrete T of RowMapper<T>; fails fast for e.g. lambdas, which erase it
        this.mappedType = findGenericParameter(mapper.getClass(), RowMapper.class)
                .orElseThrow(() -> new UnsupportedOperationException("Must use a concretely typed RowMapper here"));
    }

    @Override
    public Optional<RowMapper<?>> build(Type type, ConfigRegistry config) {
        if (mappedType.equals(type)) {
            return Optional.of(delegate);
        }
        return Optional.empty();
    }
}
| apache-2.0 |
dbflute-session/lastaflute-test-catalog | src/main/java/org/docksidestage/dbflute/bsentity/BsMemberFollowing.java | 14068 | /*
* Copyright 2015-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.docksidestage.dbflute.bsentity;
import java.util.List;
import java.util.ArrayList;
import org.dbflute.Entity;
import org.dbflute.dbmeta.DBMeta;
import org.dbflute.dbmeta.AbstractEntity;
import org.dbflute.dbmeta.accessory.DomainEntity;
import org.dbflute.optional.OptionalEntity;
import org.docksidestage.dbflute.allcommon.DBMetaInstanceHandler;
import org.docksidestage.dbflute.exentity.*;
/**
* The entity of (会員フォローイング)MEMBER_FOLLOWING as TABLE. <br>
* とある会員が他の会員をフォローできる。すると、フォローした会員の購入履歴が閲覧できる。
* <pre>
* [primary-key]
* MEMBER_FOLLOWING_ID
*
* [column]
* MEMBER_FOLLOWING_ID, MY_MEMBER_ID, YOUR_MEMBER_ID, FOLLOW_DATETIME
*
* [sequence]
*
*
* [identity]
* MEMBER_FOLLOWING_ID
*
* [version-no]
*
*
* [foreign table]
* MEMBER
*
* [referrer table]
*
*
* [foreign property]
* memberByMyMemberId, memberByYourMemberId
*
* [referrer property]
*
*
* [get/set template]
* /= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
* Long memberFollowingId = entity.getMemberFollowingId();
* Integer myMemberId = entity.getMyMemberId();
* Integer yourMemberId = entity.getYourMemberId();
* java.time.LocalDateTime followDatetime = entity.getFollowDatetime();
* entity.setMemberFollowingId(memberFollowingId);
* entity.setMyMemberId(myMemberId);
* entity.setYourMemberId(yourMemberId);
* entity.setFollowDatetime(followDatetime);
* = = = = = = = = = =/
* </pre>
* @author DBFlute(AutoGenerator)
*/
public abstract class BsMemberFollowing extends AbstractEntity implements DomainEntity {
// ===================================================================================
// Definition
// ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    /** (Member Following ID) MEMBER_FOLLOWING_ID: {PK, ID, NotNull, BIGINT(19)} */
    protected Long _memberFollowingId;
    /** (Me) MY_MEMBER_ID: {UQ+, NotNull, INT(10), FK to member} */
    protected Integer _myMemberId;
    /** (You) YOUR_MEMBER_ID: {+UQ, IX, NotNull, INT(10), FK to member} */
    protected Integer _yourMemberId;
    /** (The Moment) FOLLOW_DATETIME: {IX, NotNull, DATETIME(19)} */
    protected java.time.LocalDateTime _followDatetime;
// ===================================================================================
// DB Meta
// =======
    /** {@inheritDoc} */
    public DBMeta asDBMeta() {
        return DBMetaInstanceHandler.findDBMeta(asTableDbName());
    }
    /** {@inheritDoc} */
    public String asTableDbName() {
        return "member_following";
    }

    // ===================================================================================
    //                                                                        Key Handling
    //                                                                        ============
    /** {@inheritDoc} The primary key is the single column MEMBER_FOLLOWING_ID. */
    public boolean hasPrimaryKeyValue() {
        if (_memberFollowingId == null) { return false; }
        return true;
    }
    /**
     * To be unique by the unique column. <br>
     * You can update the entity by the key when entity update (NOT batch update).
     * @param myMemberId (Me): UQ+, NotNull, INT(10), FK to member. (NotNull)
     * @param yourMemberId (You): +UQ, IX, NotNull, INT(10), FK to member. (NotNull)
     */
    public void uniqueBy(Integer myMemberId, Integer yourMemberId) {
        __uniqueDrivenProperties.clear();
        __uniqueDrivenProperties.addPropertyName("myMemberId");
        __uniqueDrivenProperties.addPropertyName("yourMemberId");
        setMyMemberId(myMemberId);setYourMemberId(yourMemberId);
    }
    // ===================================================================================
    //                                                                    Foreign Property
    //                                                                    ================
    /** (Member) MEMBER by my MY_MEMBER_ID, named 'memberByMyMemberId'. */
    protected OptionalEntity<Member> _memberByMyMemberId;

    /**
     * [get] (Member) MEMBER by my MY_MEMBER_ID, named 'memberByMyMemberId'. <br>
     * Optional: alwaysPresent(), ifPresent().orElse(), get(), ...
     * @return The entity of foreign property 'memberByMyMemberId'. (NotNull, EmptyAllowed: when e.g. null FK column, no setupSelect)
     */
    public OptionalEntity<Member> getMemberByMyMemberId() {
        if (_memberByMyMemberId == null) { _memberByMyMemberId = OptionalEntity.relationEmpty(this, "memberByMyMemberId"); }
        return _memberByMyMemberId;
    }

    /**
     * [set] (Member) MEMBER by my MY_MEMBER_ID, named 'memberByMyMemberId'.
     * @param memberByMyMemberId The entity of foreign property 'memberByMyMemberId'. (NullAllowed)
     */
    public void setMemberByMyMemberId(OptionalEntity<Member> memberByMyMemberId) {
        _memberByMyMemberId = memberByMyMemberId;
    }

    /** (Member) MEMBER by my YOUR_MEMBER_ID, named 'memberByYourMemberId'. */
    protected OptionalEntity<Member> _memberByYourMemberId;

    /**
     * [get] (Member) MEMBER by my YOUR_MEMBER_ID, named 'memberByYourMemberId'. <br>
     * Optional: alwaysPresent(), ifPresent().orElse(), get(), ...
     * @return The entity of foreign property 'memberByYourMemberId'. (NotNull, EmptyAllowed: when e.g. null FK column, no setupSelect)
     */
    public OptionalEntity<Member> getMemberByYourMemberId() {
        if (_memberByYourMemberId == null) { _memberByYourMemberId = OptionalEntity.relationEmpty(this, "memberByYourMemberId"); }
        return _memberByYourMemberId;
    }

    /**
     * [set] (Member) MEMBER by my YOUR_MEMBER_ID, named 'memberByYourMemberId'.
     * @param memberByYourMemberId The entity of foreign property 'memberByYourMemberId'. (NullAllowed)
     */
    public void setMemberByYourMemberId(OptionalEntity<Member> memberByYourMemberId) {
        _memberByYourMemberId = memberByYourMemberId;
    }
    // ===================================================================================
    //                                                                   Referrer Property
    //                                                                   =================
    /** Creates a fresh list instance for referrer loading. */
    protected <ELEMENT> List<ELEMENT> newReferrerList() { // overriding to import
        return new ArrayList<ELEMENT>();
    }
// ===================================================================================
// Basic Override
// ==============
@Override
protected boolean doEquals(Object obj) {
if (obj instanceof BsMemberFollowing) {
BsMemberFollowing other = (BsMemberFollowing)obj;
if (!xSV(_memberFollowingId, other._memberFollowingId)) { return false; }
return true;
} else {
return false;
}
}
@Override
protected int doHashCode(int initial) {
int hs = initial;
hs = xCH(hs, asTableDbName());
hs = xCH(hs, _memberFollowingId);
return hs;
}
@Override
protected String doBuildStringWithRelation(String li) {
StringBuilder sb = new StringBuilder();
if (_memberByMyMemberId != null && _memberByMyMemberId.isPresent())
{ sb.append(li).append(xbRDS(_memberByMyMemberId, "memberByMyMemberId")); }
if (_memberByYourMemberId != null && _memberByYourMemberId.isPresent())
{ sb.append(li).append(xbRDS(_memberByYourMemberId, "memberByYourMemberId")); }
return sb.toString();
}
/** Builds the display string of a relation entity; precondition: {@code et.isPresent()} (callers check). */
protected <ET extends Entity> String xbRDS(org.dbflute.optional.OptionalEntity<ET> et, String name) { // buildRelationDisplayString()
    return et.get().buildDisplayString(name, true, true);
}
/** Renders all columns in definition order, delimited by {@code dm} and wrapped in braces. */
@Override
protected String doBuildColumnString(String dm) {
    // Open the braces up front instead of stripping a leading delimiter afterwards.
    StringBuilder sb = new StringBuilder("{");
    sb.append(xfND(_memberFollowingId));
    sb.append(dm).append(xfND(_myMemberId));
    sb.append(dm).append(xfND(_yourMemberId));
    sb.append(dm).append(xfND(_followDatetime));
    return sb.append("}").toString();
}
/** Lists the names of loaded relations wrapped in parentheses, or "" when none are loaded. */
@Override
protected String doBuildRelationString(String dm) {
    StringBuilder sb = new StringBuilder();
    if (_memberByMyMemberId != null && _memberByMyMemberId.isPresent()) {
        sb.append(dm).append("memberByMyMemberId");
    }
    if (_memberByYourMemberId != null && _memberByYourMemberId.isPresent()) {
        sb.append(dm).append("memberByYourMemberId");
    }
    if (sb.length() == 0) {
        return "";
    }
    return sb.delete(0, dm.length()).insert(0, "(").append(")").toString();
}
/** Returns a copy (via {@code super.clone()}) typed as the concrete MemberFollowing entity. */
@Override
public MemberFollowing clone() {
    return (MemberFollowing)super.clone();
}
// ===================================================================================
// Accessor
// ========
/**
 * [get] (Member Following ID)MEMBER_FOLLOWING_ID: {PK, ID, NotNull, BIGINT(19)} <br>
 * Serial number.
 * @return The value of the column 'MEMBER_FOLLOWING_ID'. (basically NotNull if selected: for the constraint)
 */
public Long getMemberFollowingId() {
    checkSpecifiedProperty("memberFollowingId");
    return _memberFollowingId;
}
/**
 * [set] (Member Following ID)MEMBER_FOLLOWING_ID: {PK, ID, NotNull, BIGINT(19)} <br>
 * Serial number.
 * @param memberFollowingId The value of the column 'MEMBER_FOLLOWING_ID'. (basically NotNull if update: for the constraint)
 */
public void setMemberFollowingId(Long memberFollowingId) {
    registerModifiedProperty("memberFollowingId");
    _memberFollowingId = memberFollowingId;
}
/**
 * [get] (Me)MY_MEMBER_ID: {UQ+, NotNull, INT(10), FK to member} <br>
 * ID of the member who found someone interesting and summoned the courage to follow.
 * @return The value of the column 'MY_MEMBER_ID'. (basically NotNull if selected: for the constraint)
 */
public Integer getMyMemberId() {
    checkSpecifiedProperty("myMemberId");
    return _myMemberId;
}
/**
 * [set] (Me)MY_MEMBER_ID: {UQ+, NotNull, INT(10), FK to member} <br>
 * ID of the member who found someone interesting and summoned the courage to follow.
 * @param myMemberId The value of the column 'MY_MEMBER_ID'. (basically NotNull if update: for the constraint)
 */
public void setMyMemberId(Integer myMemberId) {
    registerModifiedProperty("myMemberId");
    _myMemberId = myMemberId;
}
/**
 * [get] (You)YOUR_MEMBER_ID: {+UQ, IX, NotNull, INT(10), FK to member} <br>
 * ID of the member whose heart wavered a little at the sudden action.
 * @return The value of the column 'YOUR_MEMBER_ID'. (basically NotNull if selected: for the constraint)
 */
public Integer getYourMemberId() {
    checkSpecifiedProperty("yourMemberId");
    return _yourMemberId;
}
/**
 * [set] (You)YOUR_MEMBER_ID: {+UQ, IX, NotNull, INT(10), FK to member} <br>
 * ID of the member whose heart wavered a little at the sudden action.
 * @param yourMemberId The value of the column 'YOUR_MEMBER_ID'. (basically NotNull if update: for the constraint)
 */
public void setYourMemberId(Integer yourMemberId) {
    registerModifiedProperty("yourMemberId");
    _yourMemberId = yourMemberId;
}
/**
 * [get] (The Moment)FOLLOW_DATETIME: {IX, NotNull, DATETIME(19)} <br>
 * A date-time that feels a little embarrassing to look back on.
 * @return The value of the column 'FOLLOW_DATETIME'. (basically NotNull if selected: for the constraint)
 */
public java.time.LocalDateTime getFollowDatetime() {
    checkSpecifiedProperty("followDatetime");
    return _followDatetime;
}
/**
 * [set] (The Moment)FOLLOW_DATETIME: {IX, NotNull, DATETIME(19)} <br>
 * A date-time that feels a little embarrassing to look back on.
 * @param followDatetime The value of the column 'FOLLOW_DATETIME'. (basically NotNull if update: for the constraint)
 */
public void setFollowDatetime(java.time.LocalDateTime followDatetime) {
    registerModifiedProperty("followDatetime");
    _followDatetime = followDatetime;
}
}
| apache-2.0 |
GroupCDG/carbon-logging | src/main/java/com/groupcdg/carbon/logging/interceptor/LoggingMethodInterceptor.java | 29630 | /*
* Copyright 2015 Computing Distribution Group Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.groupcdg.carbon.logging.interceptor;
import static com.groupcdg.carbon.logging.helper.spi.LoggingHelper.logAfter;
import static com.groupcdg.carbon.logging.helper.spi.LoggingHelper.logAfterThrowing;
import static com.groupcdg.carbon.logging.helper.spi.LoggingHelper.logBefore;
import static com.groupcdg.carbon.logging.helper.spi.LoggingUtils.determineParameterNames;
import static com.groupcdg.carbon.logging.helper.spi.LoggingUtils.getLogger;
import static com.groupcdg.carbon.logging.helper.spi.LoggingUtils.inferEffectiveLevel;
import static com.groupcdg.carbon.logging.helper.spi.LoggingUtils.matchMethod;
import static com.groupcdg.carbon.logging.helper.spi.LoggingUtils.obtainToStringStrategy;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import com.groupcdg.carbon.logging.annotation.Debug;
import com.groupcdg.carbon.logging.annotation.Info;
import com.groupcdg.carbon.logging.annotation.Level;
import com.groupcdg.carbon.logging.annotation.Log;
import com.groupcdg.carbon.logging.annotation.None;
import com.groupcdg.carbon.logging.annotation.Trace;
import com.groupcdg.carbon.logging.annotation.Warn;
import com.groupcdg.carbon.logging.helper.spi.LoggingUtils;
import com.groupcdg.carbon.logging.interceptor.api.MethodInterceptor;
import com.groupcdg.carbon.logging.tostring.ReflectionToStringStrategy;
import com.groupcdg.carbon.logging.tostring.api.ToStringStrategy;
public class LoggingMethodInterceptor implements MethodInterceptor {
// Shared, pre-built interceptor instances, one per log level (see of(Level)).
private static final LoggingMethodInterceptor ERROR = new LoggingMethodInterceptor(new StaticConfiguration(Level.ERROR));
private static final LoggingMethodInterceptor WARN = new LoggingMethodInterceptor(new StaticConfiguration(Level.WARN));
private static final LoggingMethodInterceptor INFO = new LoggingMethodInterceptor(new StaticConfiguration(Level.INFO));
private static final LoggingMethodInterceptor DEBUG = new LoggingMethodInterceptor(new StaticConfiguration(Level.DEBUG));
private static final LoggingMethodInterceptor TRACE = new LoggingMethodInterceptor(new StaticConfiguration(Level.TRACE));
private static final LoggingMethodInterceptor DEFAULT = new LoggingMethodInterceptor(new StaticConfiguration(Level.DEFAULT));
private static final LoggingMethodInterceptor NONE = new LoggingMethodInterceptor(new StaticConfiguration(Level.NONE));
// Per-method logging decisions (level, filters, rendering) consulted by every callback.
private Configuration configuration;
// Exception-type filters forwarded to logAfterThrowing; both default to empty arrays.
// NOTE(review): presumably empty means "no filtering" -- confirm in LoggingHelper.logAfterThrowing.
@SuppressWarnings("unchecked")
private Class<Throwable>[] exceptionClasses = new Class[0];
@SuppressWarnings("unchecked")
private Class<Throwable>[] ignoredExceptionClasses = new Class[0];
// Instances are created only through the static of(...) factories.
private LoggingMethodInterceptor(LoggingMethodInterceptor.Configuration configuration) {
    this.configuration = configuration;
}
/** Logs method entry (name and, when configured, arguments) for methods the configuration targets. */
@Override
public Object interceptBefore(Object proxy, Method method, Object[] args, Object realTarget) {
    if (!configuration.isMatchedMethod(method)) {
        return null;
    }
    Class<?> actualType = LoggingUtils.determineActualType(proxy);
    String[] parameterNames = determineParameterNames(method.getParameters());
    logBefore(getLogger(actualType), actualType, method.getName(), parameterNames, args,
            configuration.getLevelToUseBefore(method), configuration.isLogArgs(method),
            configuration.getToStringStrategy(method), configuration.isIncludeStartAndEndMarkers(method));
    return null;
}
/**
 * Logs method exit (and, when configured, the return value) for methods the configuration targets.
 * <p>
 * BUGFIX: previously passed {@code configuration.getLevelToUseBefore(method)} to {@code logAfter},
 * so the "after" log line used the before-phase level and {@code getLevelToUseAfter} was never
 * called anywhere; annotation-driven configurations distinguish beforeLevel()/afterLevel(), so the
 * after-phase level was silently wrong. Now uses {@code getLevelToUseAfter(method)}.
 */
@Override
public void interceptAfter(Object proxy, Method method, Object[] args, Object realTarget, Object retObject,
        Object interceptBefore) {
    if (!configuration.isMatchedMethod(method)) {
        return;
    }
    Class<?> actualType = LoggingUtils.determineActualType(proxy);
    logAfter(getLogger(actualType), actualType, method.getName(), method.getReturnType(),
            configuration.getLevelToUseAfter(method), configuration.isLogReturning(method),
            configuration.isLogReturnValue(method), retObject,
            configuration.getToStringStrategy(method), configuration.isIncludeStartAndEndMarkers(method));
}
/** Logs a thrown exception (with optional stack trace) for methods the configuration targets. */
@Override
public void interceptAfterThrowing(Object proxy, Method method, Object[] args, Object realTarget, Throwable cause,
        Object interceptBeforeReturnObject) {
    if (!configuration.isMatchedMethod(method)) {
        return;
    }
    Class<?> actualType = LoggingUtils.determineActualType(proxy);
    String[] parameterNames = determineParameterNames(method.getParameters());
    logAfterThrowing(getLogger(actualType), actualType, method.getName(), parameterNames, args,
            configuration.getLevelToUseAfterThrowing(method), configuration.isLogExceptions(method),
            exceptionClasses, ignoredExceptionClasses, cause, configuration.isPrintStackTrace(method),
            configuration.getToStringStrategy(method), configuration.isIncludeStartAndEndMarkers(method));
}
/**
 * Creates an interceptor with an explicit static configuration that applies to every method.
 * @param level The level used for before/after/after-throwing log lines alike.
 */
public static LoggingMethodInterceptor of(Level level, ToStringStrategy toStringStrategy, boolean logArgs, boolean logReturning, boolean logReturnValue, boolean logExceptions, boolean includeStartAndEndMarkers) {
    return new LoggingMethodInterceptor(new StaticConfiguration(level, toStringStrategy, logArgs, logReturning, logReturnValue, logExceptions, includeStartAndEndMarkers));
}
/**
 * Creates an interceptor with an explicit static configuration restricted to {@code matchMethods},
 * optionally printing stack traces when logging exceptions.
 */
public static LoggingMethodInterceptor of(Level level, ToStringStrategy toStringStrategy, boolean logArgs, boolean logReturning, boolean logReturnValue, boolean logExceptions, boolean includeStartAndEndMarkers, boolean printStackTrace, Method... matchMethods) {
    return new LoggingMethodInterceptor(new StaticConfiguration(level, toStringStrategy, logArgs, logReturning, logReturnValue, logExceptions, includeStartAndEndMarkers, printStackTrace, matchMethods));
}
/** Returns the shared, pre-built interceptor instance for the given level. */
public static LoggingMethodInterceptor of(Level level) {
    switch (level) {
        case ERROR:   return ERROR;
        case WARN:    return WARN;
        case INFO:    return INFO;
        case DEBUG:   return DEBUG;
        case TRACE:   return TRACE;
        case DEFAULT: return DEFAULT;
        case NONE:    return NONE;
        default:
            // Unreachable unless Level gains a constant without a matching instance here.
            throw new IllegalStateException("Level " + level + " was not found");
    }
}
/** Creates an interceptor driven by a logging annotation (@Log, @Error, @Warn, ...) on the target's methods. */
public static LoggingMethodInterceptor of(Class<?> targetAnnotatedClass, Annotation annotation) {
    return new LoggingMethodInterceptor(new TargetAnnotationConfiguration(targetAnnotatedClass, annotation));
}
/** Per-method logging decisions; each value is forwarded to the LoggingHelper calls. */
public static interface Configuration {
    /** Level for the entry ("before") log line (forwarded to logBefore). */
    public Level getLevelToUseBefore(Method method);
    /** Level for the exit ("after") log line. */
    public Level getLevelToUseAfter(Method method);
    /** Level used when the method throws (forwarded to logAfterThrowing). */
    public Level getLevelToUseAfterThrowing(Method method);
    /** Strategy used to render arguments/return values as text. */
    public ToStringStrategy getToStringStrategy(Method method);
    /** Whether argument values are logged on entry. */
    public boolean isLogArgs(Method method);
    /** Whether an exit log line is emitted at all. */
    public boolean isLogReturning(Method method);
    /** Whether the exit line includes the return value. */
    public boolean isLogReturnValue(Method method);
    /** Whether thrown exceptions are logged. */
    public boolean isLogExceptions(Method method);
    /** Whether start/end marker text is included (forwarded to the log helpers). */
    public boolean isIncludeStartAndEndMarkers(Method method);
    /** Whether the stack trace is printed when logging an exception. */
    public boolean isPrintStackTrace(Method method);
    /** Whether the interceptor should act on the given method at all. */
    public boolean isMatchedMethod(Method method);
}
/**
 * Configuration with fixed answers for every method: one level for all phases and
 * boolean switches set at construction time.
 * <p>
 * Improvement over the original: the two shorter constructors previously duplicated the
 * field assignments of the full one; they now chain to it, so defaults live in one place.
 */
private static class StaticConfiguration implements Configuration {
    /** Level reported for before/after/after-throwing alike. */
    private Level levelToUse;
    private ToStringStrategy toStringStrategy = new ReflectionToStringStrategy();
    private boolean logArgs = true;
    private boolean logReturning = true;
    private boolean logReturnValue = true;
    private boolean logExceptions = true;
    private boolean includeStartAndEndMarkers = true;
    private boolean printStackTrace = false;
    /** Restricts interception to these methods; null handling is delegated to matchMethod. */
    private Method[] matchMethods = null;

    /** Level-only configuration; every other option keeps its default above. */
    private StaticConfiguration(Level levelToUse) {
        this.levelToUse = levelToUse;
    }

    /** Chains to the full constructor: no stack traces, no method restriction. */
    private StaticConfiguration(Level level, ToStringStrategy toStringStrategy, boolean logArgs, boolean logReturning, boolean logReturnValue, boolean logExceptions, boolean includeStartAndEndMarkers) {
        this(level, toStringStrategy, logArgs, logReturning, logReturnValue, logExceptions, includeStartAndEndMarkers, false, (Method[]) null);
    }

    private StaticConfiguration(Level level, ToStringStrategy toStringStrategy, boolean logArgs, boolean logReturning, boolean logReturnValue, boolean logExceptions, boolean includeStartAndEndMarkers, boolean printStackTrace, Method... matchMethods) {
        this.levelToUse = level;
        this.toStringStrategy = toStringStrategy;
        this.logArgs = logArgs;
        this.logReturning = logReturning;
        this.logReturnValue = logReturnValue;
        this.logExceptions = logExceptions;
        this.includeStartAndEndMarkers = includeStartAndEndMarkers;
        this.printStackTrace = printStackTrace;
        this.matchMethods = matchMethods;
    }

    @Override
    public Level getLevelToUseBefore(Method method) {
        return levelToUse;
    }

    @Override
    public Level getLevelToUseAfter(Method method) {
        return levelToUse;
    }

    @Override
    public Level getLevelToUseAfterThrowing(Method method) {
        return levelToUse;
    }

    @Override
    public ToStringStrategy getToStringStrategy(Method method) {
        return toStringStrategy;
    }

    @Override
    public boolean isIncludeStartAndEndMarkers(Method method) {
        return includeStartAndEndMarkers;
    }

    @Override
    public boolean isLogArgs(Method method) {
        return logArgs;
    }

    @Override
    public boolean isLogReturning(Method method) {
        return logReturning;
    }

    @Override
    public boolean isLogReturnValue(Method method) {
        return logReturnValue;
    }

    @Override
    public boolean isLogExceptions(Method method) {
        return logExceptions;
    }

    @Override
    public boolean isPrintStackTrace(Method method) {
        return printStackTrace;
    }

    @Override
    public boolean isMatchedMethod(Method method) {
        return matchMethod(method, matchMethods);
    }
}
private static class TargetAnnotationConfiguration implements Configuration {
private Class<?> targetType;
private Annotation annotation;
private TargetAnnotationConfiguration(Class<?> targetType, Annotation annotation) {
this.targetType = targetType;
this.annotation = annotation;
}
private Log findLog(Class<?> targetType, Method method) {
Log annotation = method.getAnnotation(Log.class);
return annotation;
}
private com.groupcdg.carbon.logging.annotation.Error findError(Class<?> targetType, Method method) {
com.groupcdg.carbon.logging.annotation.Error annotation = method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class);
return annotation;
}
private Warn findWarn(Class<?> targetType, Method method) {
Warn annotation = method.getAnnotation(Warn.class);
return annotation;
}
private Info findInfo(Class<?> targetType, Method method) {
Info annotation = method.getAnnotation(Info.class);
return annotation;
}
private Debug findDebug(Class<?> targetType, Method method) {
Debug annotation = method.getAnnotation(Debug.class);
return annotation;
}
private Trace findTrace(Class<?> targetType, Method method) {
Trace annotation = method.getAnnotation(Trace.class);
return annotation;
}
private None findNone(Class<?> targetType, Method method) {
None annotation = method.getAnnotation(None.class);
return annotation;
}
@Override
public ToStringStrategy getToStringStrategy(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return obtainToStringStrategy(ann.toStringStrategy(), ann.toStringStrategyStyleName());
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isIncludeStartAndEndMarkers(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.includeStartAndEndMarkers();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.includeStartAndEndMarkers();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isLogArgs(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.logArguments();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.logArguments();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isLogReturning(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.logAfter();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.logAfter();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isLogReturnValue(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.logReturnValue();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.logReturnValue();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isLogExceptions(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.logExceptions();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.logExceptions();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isPrintStackTrace(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return ann.printStackTrace();
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return ann.printStackTrace();
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public boolean isMatchedMethod(Method method) {
if ((annotation.annotationType() == Log.class) && (method.getAnnotation(Log.class) != null)) {
return true;
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
return true;
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
return true;
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
return true;
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
return true;
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
return true;
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
return true;
} else {
return false;
}
}
@Override
public Level getLevelToUseBefore(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return inferEffectiveLevel(ann, ann.level(), ann.beforeLevel());
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return inferEffectiveLevel(ann, Level.ERROR, ann.beforeLevel());
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return inferEffectiveLevel(ann, Level.WARN, ann.beforeLevel());
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return inferEffectiveLevel(ann, Level.INFO, ann.beforeLevel());
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return inferEffectiveLevel(ann, Level.DEBUG, ann.beforeLevel());
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return inferEffectiveLevel(ann, Level.TRACE, ann.beforeLevel());
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return inferEffectiveLevel(ann, Level.NONE, ann.beforeLevel());
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public Level getLevelToUseAfter(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return inferEffectiveLevel(ann, ann.level(), ann.afterLevel());
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return inferEffectiveLevel(ann, Level.ERROR, ann.afterLevel());
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return inferEffectiveLevel(ann, Level.WARN, ann.afterLevel());
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return inferEffectiveLevel(ann, Level.INFO, ann.afterLevel());
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return inferEffectiveLevel(ann, Level.DEBUG, ann.afterLevel());
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return inferEffectiveLevel(ann, Level.TRACE, ann.afterLevel());
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return inferEffectiveLevel(ann, Level.NONE, ann.afterLevel());
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
@Override
public Level getLevelToUseAfterThrowing(Method method) {
if ((annotation.annotationType() == Log.class)) {
Log ann = findLog(targetType, method);
return inferEffectiveLevel(ann, ann.level(), ann.exceptionLevel());
} else if ((annotation.annotationType() == com.groupcdg.carbon.logging.annotation.Error.class) && (method.getAnnotation(com.groupcdg.carbon.logging.annotation.Error.class) != null)) {
com.groupcdg.carbon.logging.annotation.Error ann = findError(targetType, method);
return inferEffectiveLevel(ann, Level.ERROR, ann.exceptionLevel());
} else if ((annotation.annotationType() == Warn.class) && (method.getAnnotation(Warn.class) != null)) {
Warn ann = findWarn(targetType, method);
return inferEffectiveLevel(ann, Level.WARN, ann.exceptionLevel());
} else if ((annotation.annotationType() == Info.class) && (method.getAnnotation(Info.class) != null)) {
Info ann = findInfo(targetType, method);
return inferEffectiveLevel(ann, Level.INFO, ann.exceptionLevel());
} else if ((annotation.annotationType() == Debug.class) && (method.getAnnotation(Debug.class) != null)) {
Debug ann = findDebug(targetType, method);
return inferEffectiveLevel(ann, Level.DEBUG, ann.exceptionLevel());
} else if ((annotation.annotationType() == Trace.class) && (method.getAnnotation(Trace.class) != null)) {
Trace ann = findTrace(targetType, method);
return inferEffectiveLevel(ann, Level.TRACE, ann.exceptionLevel());
} else if ((annotation.annotationType() == None.class) && (method.getAnnotation(None.class) != null)) {
None ann = findNone(targetType, method);
return inferEffectiveLevel(ann, Level.NONE, ann.exceptionLevel());
} else {
throw new IllegalStateException("Invalid annotation type matched: " + annotation.annotationType());
}
}
}
} | apache-2.0 |
mchllngr/AndroidBoilerplate | app/src/main/java/de/mchllngr/androidboilerplate/injection/ApplicationComponent.java | 372 | package de.mchllngr.androidboilerplate.injection;
import javax.inject.Singleton;
import dagger.Component;
import de.mchllngr.androidboilerplate.module.main.MainPresenter;
/**
* Dagger2-component for the whole application.
*/
@Singleton
@Component(modules = ApplicationModule.class)
public interface ApplicationComponent {

    /**
     * Injects this component's dependencies into the given {@link MainPresenter}.
     *
     * @param presenter the presenter whose {@code @Inject}-annotated members are filled in
     */
    void inject(MainPresenter presenter);
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/DeleteNatGatewayResultStaxUnmarshaller.java | 2480 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* DeleteNatGatewayResult StAX Unmarshaller
*/
// Generated code (see @Generated below) — regenerate rather than hand-edit.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteNatGatewayResultStaxUnmarshaller implements Unmarshaller<DeleteNatGatewayResult, StaxUnmarshallerContext> {

    /**
     * Reads XML events from the context and populates a
     * {@link DeleteNatGatewayResult}, returning when the document ends or the
     * parser leaves the result element's depth.
     */
    public DeleteNatGatewayResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        DeleteNatGatewayResult deleteNatGatewayResult = new DeleteNatGatewayResult();
        int originalDepth = context.getCurrentDepth();
        // Only elements one level below the current element are fields of this result.
        int targetDepth = originalDepth + 1;
        if (context.isStartOfDocument())
            targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return deleteNatGatewayResult;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("natGatewayId", targetDepth)) {
                    deleteNatGatewayResult.setNatGatewayId(StringStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Popped above the element we started in: result is complete.
                if (context.getCurrentDepth() < originalDepth) {
                    return deleteNatGatewayResult;
                }
            }
        }
    }

    private static DeleteNatGatewayResultStaxUnmarshaller instance;

    // Lazy init is not synchronized; presumably acceptable because the
    // unmarshaller is stateless (worst case, a duplicate instance is created).
    public static DeleteNatGatewayResultStaxUnmarshaller getInstance() {
        if (instance == null)
            instance = new DeleteNatGatewayResultStaxUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
mandusm/presto | presto-spi/src/main/java/com/facebook/presto/spi/block/AbstractSingleMapBlock.java | 7257 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.spi.block;
import io.airlift.slice.Slice;
import java.util.List;
/**
 * Block view over a single map entry set, stored as interleaved key/value
 * positions: even absolute positions address {@code keyBlock}, odd ones
 * {@code valueBlock}, each at index {@code absolutePosition / 2}.
 *
 * <p>Refactored to route every accessor through {@link #blockFor(int)},
 * removing the copy-pasted even/odd dispatch that previously appeared in
 * every method. Behavior and the public interface are unchanged.
 */
public abstract class AbstractSingleMapBlock
        implements Block
{
    private final int offset;
    private final Block keyBlock;
    private final Block valueBlock;

    public AbstractSingleMapBlock(int offset, Block keyBlock, Block valueBlock)
    {
        this.offset = offset;
        this.keyBlock = keyBlock;
        this.valueBlock = valueBlock;
    }

    /**
     * Validates the caller-visible position and converts it into an absolute
     * position in the interleaved key/value layout.
     *
     * @throws IllegalArgumentException if position is out of range
     */
    private int getAbsolutePosition(int position)
    {
        if (position < 0 || position >= getPositionCount()) {
            throw new IllegalArgumentException("position is not valid");
        }
        return position + offset;
    }

    // Underlying block addressed by an absolute position (even = key, odd = value).
    private Block blockFor(int absolutePosition)
    {
        return absolutePosition % 2 == 0 ? keyBlock : valueBlock;
    }

    @Override
    public boolean isNull(int position)
    {
        int absolutePosition = getAbsolutePosition(position);
        if (absolutePosition % 2 == 0) {
            // Map keys must never be null; surface that as a state error.
            if (keyBlock.isNull(absolutePosition / 2)) {
                throw new IllegalStateException("Map key is null");
            }
            return false;
        }
        return valueBlock.isNull(absolutePosition / 2);
    }

    @Override
    public byte getByte(int position, int offset)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getByte(absolutePosition / 2, offset);
    }

    @Override
    public short getShort(int position, int offset)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getShort(absolutePosition / 2, offset);
    }

    @Override
    public int getInt(int position, int offset)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getInt(absolutePosition / 2, offset);
    }

    @Override
    public long getLong(int position, int offset)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getLong(absolutePosition / 2, offset);
    }

    @Override
    public Slice getSlice(int position, int offset, int length)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getSlice(absolutePosition / 2, offset, length);
    }

    @Override
    public int getSliceLength(int position)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getSliceLength(absolutePosition / 2);
    }

    @Override
    public boolean bytesEqual(int position, int offset, Slice otherSlice, int otherOffset, int length)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).bytesEqual(absolutePosition / 2, offset, otherSlice, otherOffset, length);
    }

    @Override
    public int bytesCompare(int position, int offset, int length, Slice otherSlice, int otherOffset, int otherLength)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).bytesCompare(absolutePosition / 2, offset, length, otherSlice, otherOffset, otherLength);
    }

    @Override
    public void writeBytesTo(int position, int offset, int length, BlockBuilder blockBuilder)
    {
        int absolutePosition = getAbsolutePosition(position);
        blockFor(absolutePosition).writeBytesTo(absolutePosition / 2, offset, length, blockBuilder);
    }

    @Override
    public boolean equals(int position, int offset, Block otherBlock, int otherPosition, int otherOffset, int length)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).equals(absolutePosition / 2, offset, otherBlock, otherPosition, otherOffset, length);
    }

    @Override
    public long hash(int position, int offset, int length)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).hash(absolutePosition / 2, offset, length);
    }

    @Override
    public <T> T getObject(int position, Class<T> clazz)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getObject(absolutePosition / 2, clazz);
    }

    @Override
    public void writePositionTo(int position, BlockBuilder blockBuilder)
    {
        int absolutePosition = getAbsolutePosition(position);
        blockFor(absolutePosition).writePositionTo(absolutePosition / 2, blockBuilder);
    }

    @Override
    public Block getSingleValueBlock(int position)
    {
        int absolutePosition = getAbsolutePosition(position);
        return blockFor(absolutePosition).getSingleValueBlock(absolutePosition / 2);
    }

    // Region/position-set operations are not meaningful for a single map entry.

    @Override
    public long getRegionSizeInBytes(int position, int length)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Block copyPositions(List<Integer> positions)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Block getRegion(int positionOffset, int length)
    {
        throw new UnsupportedOperationException();
    }

    @Override
    public Block copyRegion(int position, int length)
    {
        throw new UnsupportedOperationException();
    }
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p290/Production5812.java | 1963 | package org.gradle.test.performance.mediummonolithicjavaproject.p290;
/**
 * Generated-style bean holding ten properties: three references to sibling
 * Production beans and seven strings, each with a plain getter/setter pair.
 */
public class Production5812 {

    private Production5809 property0;
    private Production5810 property1;
    private Production5811 property2;
    private String property3;
    private String property4;
    private String property5;
    private String property6;
    private String property7;
    private String property8;
    private String property9;

    /** @return property0 */
    public Production5809 getProperty0() {
        return this.property0;
    }

    /** @param value new value for property0 */
    public void setProperty0(Production5809 value) {
        this.property0 = value;
    }

    /** @return property1 */
    public Production5810 getProperty1() {
        return this.property1;
    }

    /** @param value new value for property1 */
    public void setProperty1(Production5810 value) {
        this.property1 = value;
    }

    /** @return property2 */
    public Production5811 getProperty2() {
        return this.property2;
    }

    /** @param value new value for property2 */
    public void setProperty2(Production5811 value) {
        this.property2 = value;
    }

    /** @return property3 */
    public String getProperty3() {
        return this.property3;
    }

    /** @param value new value for property3 */
    public void setProperty3(String value) {
        this.property3 = value;
    }

    /** @return property4 */
    public String getProperty4() {
        return this.property4;
    }

    /** @param value new value for property4 */
    public void setProperty4(String value) {
        this.property4 = value;
    }

    /** @return property5 */
    public String getProperty5() {
        return this.property5;
    }

    /** @param value new value for property5 */
    public void setProperty5(String value) {
        this.property5 = value;
    }

    /** @return property6 */
    public String getProperty6() {
        return this.property6;
    }

    /** @param value new value for property6 */
    public void setProperty6(String value) {
        this.property6 = value;
    }

    /** @return property7 */
    public String getProperty7() {
        return this.property7;
    }

    /** @param value new value for property7 */
    public void setProperty7(String value) {
        this.property7 = value;
    }

    /** @return property8 */
    public String getProperty8() {
        return this.property8;
    }

    /** @param value new value for property8 */
    public void setProperty8(String value) {
        this.property8 = value;
    }

    /** @return property9 */
    public String getProperty9() {
        return this.property9;
    }

    /** @param value new value for property9 */
    public void setProperty9(String value) {
        this.property9 = value;
    }
} | apache-2.0 |
Clinical3PO/Platform | dev/c3po-angularjs-visualization/rule/src/main/java/org/clinical3po/backendservices/rule/dependency/GetDependencyRule.java | 775 | /*
* Copyright 2015 Clinical Personalized Pragmatic Predictions of Outcomes.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.clinical3po.backendservices.rule.dependency;
/**
 * Placeholder for a dependency-retrieval rule in the dependency rule package.
 *
 * <p>Currently empty — no fields or behavior have been implemented yet.
 *
 * Created by w.ding on 3/9/2015.
 */
public class GetDependencyRule {
}
| apache-2.0 |
RestNEXT/restnext | restnext-util/src/main/java/org/restnext/util/FileUtils.java | 4283 | /*
* Copyright (C) 2016 Thiago Gutenberg Carvalho da Costa
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.restnext.util;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.function.BiPredicate;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Created by thiago on 10/11/16.
*/
/**
 * File-system helper methods for listing directory children (shallow and
 * recursive, with optional glob filtering) and stripping file extensions.
 *
 * Created by thiago on 10/11/16.
 */
public final class FileUtils {

    /** Glob that matches everything. */
    public static final String DEFAULT_GLOB = "*";

    private FileUtils() {
        throw new AssertionError();
    }

    public static Set<Path> listChildren(Path directory) {
        return listChildren(directory, DEFAULT_GLOB);
    }

    /**
     * Lists the immediate children of {@code directory} matching the glob.
     *
     * @param directory the directory (null or non-directory yields empty set)
     * @param glob the glob filter; null/blank falls back to {@link #DEFAULT_GLOB}
     * @return an unmodifiable set of matching child paths
     */
    public static Set<Path> listChildren(Path directory, String glob) {
        if (directory == null || !Files.isDirectory(directory)) {
            return Collections.emptySet();
        }
        String effectiveGlob = glob == null || glob.trim().isEmpty() ? DEFAULT_GLOB : glob;
        Set<Path> children = new HashSet<>();
        try (DirectoryStream<Path> childrenStream = Files.newDirectoryStream(directory, effectiveGlob)) {
            childrenStream.forEach(children::add);
            return Collections.unmodifiableSet(children);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    public static Set<Path> deepListChildren(Path directory) {
        return deepListChildren(directory, DEFAULT_GLOB);
    }

    /**
     * Recursively lists children of {@code directory} matching the glob.
     *
     * <p>With the default glob every descendant (files and directories) except
     * the root itself is returned; with a specific glob, entries whose file
     * name matches the pattern are returned.
     *
     * @param directory the directory (null or non-directory yields empty set)
     * @param glob the glob filter; null/blank falls back to {@link #DEFAULT_GLOB}
     * @return an unmodifiable set of matching descendant paths
     */
    public static Set<Path> deepListChildren(final Path directory, String glob) {
        if (directory == null || !Files.isDirectory(directory)) {
            return Collections.emptySet();
        }
        String effectiveGlob = glob == null || glob.trim().isEmpty() ? DEFAULT_GLOB : glob;
        final PathMatcher pathMatcher = directory.getFileSystem()
                .getPathMatcher("glob:" + effectiveGlob);
        BiPredicate<Path, BasicFileAttributes> filter = DEFAULT_GLOB.equals(effectiveGlob.trim())
                ? (path, basicFileAttributes) -> {
                    try {
                        // Exclude the root directory itself from its own listing.
                        return !Files.isSameFile(directory, path);
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    }
                }
                : (path, basicFileAttributes) -> pathMatcher.matches(path.getFileName());
        // Files.find returns a lazily-populated stream that must be closed to
        // release the underlying directory handles (previously leaked here).
        try (Stream<Path> matches = Files.find(directory, Integer.MAX_VALUE, filter)) {
            Set<Path> children = matches.collect(Collectors.toCollection(HashSet::new));
            return Collections.unmodifiableSet(children);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns {@code file} with its extension removed, or the path unchanged
     * when it is null, a directory, hidden, or has no extension.
     *
     * <p>The dot is looked up in the file name only, so a dot inside a parent
     * directory name is ignored (the previous implementation searched the full
     * path string and threw {@code StringIndexOutOfBoundsException} for names
     * without a dot).
     *
     * @param file the path
     * @return the path without extension
     */
    public static Path removeExtension(Path file) {
        Predicate<Path> filter = path -> {
            try {
                return !Files.isDirectory(path) && !Files.isHidden(path);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
        Function<Path, Path> removeExtensionFunction = path -> {
            String fileName = path.getFileName().toString();
            int dotIndex = fileName.lastIndexOf('.');
            if (dotIndex <= 0) {
                // No extension (or a leading-dot name): nothing to strip.
                return path;
            }
            String strPath = path.toString();
            int suffixLength = fileName.length() - dotIndex;
            return Paths.get(strPath.substring(0, strPath.length() - suffixLength));
        };
        return Optional.ofNullable(file).filter(filter).map(removeExtensionFunction).orElse(file);
    }
}
| apache-2.0 |
oyhf521/school | school-poi/src/main/java/cn/afterturn/easypoi/entity/vo/BasePOIConstants.java | 1088 | /**
* Copyright 2013-2015 JueYue (qrb.jueyue@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cn.afterturn.easypoi.entity.vo;
/**
* 基础POI常量
* @author admin
* 2014年6月30日 下午9:23:37
*/
/**
 * Shared POI constants: map keys used when passing export data around.
 * Interface fields are implicitly {@code public static final}, so the
 * redundant modifiers were dropped.
 */
interface BasePOIConstants {

    /** Key for the annotated entity class. */
    String CLASS = "entity";

    /** Key for the table parameters object. */
    String PARAMS = "params";

    /** Key for the download file name. */
    String FILE_NAME = "fileName";
}
| apache-2.0 |
kokeroulis/AndroidUtils | jsonapiparser/src/main/java/gr/kokeroulis/jsonapiparser/ReverseFieldJson.java | 898 | package gr.kokeroulis.jsonapiparser;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/* What is this?
 * Sometimes the same element name appears both in the attributes and in the
 * included relationships, and this annotation exists to resolve that clash.
 * E.g. we have the attributes Foo and Bar, and Bar has a relationship named
 * "Foo" which has nothing to do with the "Foo" attribute. So we use this
 * annotation to rename the "Foo" attribute field to "baz" while keeping its
 * JSON name "foo".
 *
 * Use it like this:
 *
 * public class SomePojo {
 *
 *     @ReverseFieldJson(name = "foo")
 *     public Foo baz;
 *
 *     @Relationship(type = "some_relationship_key")
 *     public Bar foo;
 * }
 */
/**
 * Field annotation that supplies an alternate JSON attribute name for the
 * annotated field, so a renamed field can still map to its original JSON key.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(value = ElementType.FIELD)
public @interface ReverseFieldJson {
    /** The JSON attribute name this field maps to. */
    String name();
} | apache-2.0 |
logginghub/core | logginghub-messaging/src/main/java/com/logginghub/messaging_experimental/MessagingClient.java | 6570 | package com.logginghub.messaging_experimental;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import com.logginghub.utils.ArrayUtils;
import com.logginghub.utils.ExceptionHandler;
import com.logginghub.utils.WorkerThread;
import edu.emory.mathcs.backport.java.util.concurrent.BlockingDeque;
import edu.emory.mathcs.backport.java.util.concurrent.LinkedBlockingDeque;
/**
 * Experimental socket messaging client. A connection thread dials one of the
 * configured connection points (round-robin, retrying after a delay), then
 * runs the write loop; a separate read thread waits on a latch until a
 * connection exists and then runs the read loop.
 *
 * <p>Fixes over the previous revision, both in the read loop: reads now append
 * at {@code endMark} instead of always writing from index 0 (which clobbered
 * buffered, undecoded bytes), and end-of-stream ({@code read == -1}) now
 * triggers a disconnect instead of corrupting {@code endMark}.
 */
public class MessagingClient {

    /** Candidate server endpoints, tried round-robin on each (re)connect. */
    private List<InetSocketAddress> connectionPoints = new CopyOnWriteArrayList<InetSocketAddress>();
    private int nextConnectionPoint = -1;
    /** Released once a connection is established so the read thread may start. */
    private CountDownLatch readLatch = new CountDownLatch(1);
    private long reconnectionDelay = 5000;
    private ExceptionHandler exceptionHandler;
    private BlockingDeque writeQueue = new LinkedBlockingDeque();
    private MessageSerializer messageSerializer = new Version0MessageSerializer();
    /** Serializers indexed by the wire-format version byte. */
    private MessageSerializer[] messageSerializers = new MessageSerializer[] { messageSerializer };
    private Socket socket;

    private WorkerThread connectionThread = new WorkerThread("ConnectionThread") {
        @Override protected void onRun() throws Throwable {
            connectionThreadMain();
        }
    };

    private WorkerThread readThread = new WorkerThread("ReadThread") {
        @Override protected void onRun() throws Throwable {
            readThreadMain();
        }
    };

    /**
     * Registers a server endpoint to connect to.
     *
     * @param connectionPoint the endpoint to add
     */
    public void addConnectionPoint(InetSocketAddress connectionPoint) {
        connectionPoints.add(connectionPoint);
        if (nextConnectionPoint == -1) {
            nextConnectionPoint = 0;
        }
    }

    /**
     * Starts the connection and read threads (idempotent).
     *
     * @throws RuntimeException if no connection points have been added
     */
    public synchronized void start() {
        if (connectionPoints.isEmpty()) {
            throw new RuntimeException("Please add one or more connection points before you start the client, otherwise we dont know where to connect to.");
        }
        if (!connectionThread.isRunning()) {
            connectionThread.start();
            readThread.start();
        }
    }

    /** Stops both worker threads. */
    public synchronized void stop() {
        connectionThread.stop();
        readThread.stop();
    }

    /** Read-thread entry point: waits until a connection exists, then reads. */
    protected void readThreadMain() {
        try {
            readLatch.await();
            runReadLoop();
        }
        catch (InterruptedException e) {}
    }

    /** Connection-thread entry point: dials the next endpoint, retries on failure. */
    protected void connectionThreadMain() {
        Socket socket = new Socket();
        InetSocketAddress address = getNextConnectionPoint();
        // TODO : set the socket settings
        try {
            socket.connect(address);
            handleSuccessfulConnect(socket);
        }
        catch (IOException e) {
            if (exceptionHandler != null) {
                exceptionHandler.handleException("Exception caught from connect", e);
            }
            connectionDelay();
        }
    }

    private void handleSuccessfulConnect(Socket socket) {
        this.socket = socket;
        readLatch.countDown();
        // Refresh the latch straight away in case we disconnect almost
        // instantly.
        // NOTE(review): the read thread may not have passed await() before the
        // latch is replaced — confirm this race is acceptable.
        readLatch = new CountDownLatch(1);
        runWriteLoop();
    }

    /** Drains the write queue onto the socket until disconnected or stopped. */
    private void runWriteLoop() {
        while (socket != null && connectionThread.isRunning()) {
            try {
                Message message = (Message) writeQueue.takeFirst();
                writeMessage(message);
            }
            catch (InterruptedException e) {
                // Not sure we care about this, only socket errors
            }
        }
    }

    /**
     * Reads bytes into a growable buffer and attempts to decode after each
     * read. Bytes accumulate at {@code endMark}; the buffer doubles when full.
     */
    private void runReadLoop() {
        byte[] readBuffer = new byte[4 * 1024 * 10];
        int endMark = 0;
        try {
            InputStream inputStream = socket.getInputStream();
            BufferedInputStream bos = new BufferedInputStream(inputStream);
            while (socket != null && readThread.isRunning()) {
                try {
                    if (endMark == readBuffer.length) {
                        // Buffer is full of undecoded data; grow before reading more.
                        readBuffer = ArrayUtils.doubleSize(readBuffer);
                    }
                    // Append after any already-buffered bytes. The previous
                    // read(readBuffer) always wrote from index 0, overwriting
                    // undecoded data while endMark kept growing.
                    int read = bos.read(readBuffer, endMark, readBuffer.length - endMark);
                    if (read == -1) {
                        // End of stream: the peer closed the connection.
                        disconnect();
                        break;
                    }
                    endMark += read;
                    attemptToDecode(readBuffer, endMark);
                }
                catch (IOException e) {
                    if (exceptionHandler != null) {
                        exceptionHandler.handleException("Exception caught from read or decode", e);
                    }
                    disconnect();
                }
            }
        }
        catch (IOException e) {
            if (exceptionHandler != null) {
                exceptionHandler.handleException("Exception caught initialising streams", e);
            }
            disconnect();
        }
    }

    /**
     * Tries to decode a message from the buffered bytes. The first byte is the
     * wire version and selects the serializer.
     *
     * NOTE(review): decoded bytes are never consumed from the buffer and
     * endMark is never reset by this path; also the version byte indexes
     * messageSerializers without a bounds check — confirm intended for this
     * experimental code.
     */
    private void attemptToDecode(byte[] readBuffer, int endMarker) {
        if (endMarker >= 1) {
            ByteBuffer wrapped = ByteBuffer.wrap(readBuffer);
            int version = (int) wrapped.get();
            MessageSerializer serializerForVersion = messageSerializers[version];
            serializerForVersion.attemptToDecode(wrapped);
        }
    }

    /** Serializes and writes one message; disconnects on I/O failure. */
    private void writeMessage(Message message) {
        ByteBuffer serialized = messageSerializer.serialize(message);
        try {
            socket.getOutputStream().write(serialized.array(), 0, serialized.remaining());
        }
        catch (IOException e) {
            if (exceptionHandler != null) {
                exceptionHandler.handleException("Exception caught writing message", e);
            }
            disconnect();
        }
    }

    /** Closes the socket (best effort) and interrupts the read thread. */
    private void disconnect() {
        if (socket != null) {
            try {
                socket.close();
            }
            catch (IOException e) {}
            socket = null;
        }
        readThread.interupt();
    }

    /** Sleeps for the reconnection delay between failed connection attempts. */
    private void connectionDelay() {
        try {
            Thread.sleep(reconnectionDelay);
        }
        catch (InterruptedException e) {}
    }

    /** Returns the next endpoint, cycling through the list round-robin. */
    private InetSocketAddress getNextConnectionPoint() {
        InetSocketAddress inetSocketAddress = connectionPoints.get(nextConnectionPoint);
        nextConnectionPoint++;
        if (nextConnectionPoint == connectionPoints.size()) {
            nextConnectionPoint = 0;
        }
        return inetSocketAddress;
    }
}
| apache-2.0 |
LorenzReinhart/ONOSnew | protocols/ospf/ctl/src/main/java/org/onosproject/ospf/controller/area/OspfAreaImpl.java | 27792 | /*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ospf.controller.area;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import org.onlab.packet.Ip4Address;
import org.onosproject.ospf.controller.LsaWrapper;
import org.onosproject.ospf.controller.OspfArea;
import org.onosproject.ospf.controller.OspfInterface;
import org.onosproject.ospf.controller.OspfLsa;
import org.onosproject.ospf.controller.OspfLsaType;
import org.onosproject.ospf.controller.OspfLsdb;
import org.onosproject.ospf.controller.OspfNbr;
import org.onosproject.ospf.controller.OspfNeighborState;
import org.onosproject.ospf.controller.impl.OspfNbrImpl;
import org.onosproject.ospf.controller.lsdb.OspfLsdbImpl;
import org.onosproject.ospf.protocol.lsa.LsaHeader;
import org.onosproject.ospf.protocol.lsa.subtypes.OspfLsaLink;
import org.onosproject.ospf.protocol.lsa.types.NetworkLsa;
import org.onosproject.ospf.protocol.lsa.types.RouterLsa;
import org.onosproject.ospf.protocol.util.ChecksumCalculator;
import org.onosproject.ospf.protocol.util.OspfInterfaceState;
import org.onosproject.ospf.protocol.util.OspfParameters;
import org.onosproject.ospf.protocol.util.OspfUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
* Representation an OSPF area and related information.
*/
public class OspfAreaImpl implements OspfArea {
private static final Logger log = LoggerFactory.getLogger(OspfAreaImpl.class);
/**
 * Whether AS-external-LSAs will be flooded into/throughout the area.
 */
private boolean externalRoutingCapability;
/**
 * Represents a list of all router's interfaces associated with this area.
 */
private List<OspfInterface> ospfInterfaceList;
/**
 * The LS Database for this area. It includes router-LSAs, network-LSAs and
 * summary-LSAs. AS-external-LSAs are held in the OSPF class itself.
 */
private OspfLsdbImpl database;
/**
 * A 32-bit number identifying the area.
 */
private Ip4Address areaId;
/**
 * Router ID.
 */
private Ip4Address routerId;
/**
 * Represents Options like external, opaque capabilities.
 */
private int options;
/**
 * Whether opaque capability is enabled or not.
 */
private boolean isOpaqueEnable;
/**
 * Creates an instance of area implementation, wiring up a fresh link-state
 * database bound to this area.
 */
public OspfAreaImpl() {
    database = new OspfLsdbImpl(this);
}
/**
 * Two areas are considered equal when area id, router id, external routing
 * capability and LSDB match.
 *
 * NOTE(review): only the *sizes* of the interface lists are compared, not
 * their contents — confirm that is intentional.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    OspfAreaImpl that = (OspfAreaImpl) o;
    return Objects.equal(areaId, that.areaId) &&
            Objects.equal(routerId, that.routerId) &&
            Objects.equal(externalRoutingCapability, that.externalRoutingCapability) &&
            Objects.equal(ospfInterfaceList.size(), that.ospfInterfaceList.size()) &&
            Objects.equal(database, that.database);
}
/**
 * Hashes the interface-list *size* (null-safe) rather than the list itself,
 * to stay consistent with {@code equals}, which compares only the sizes of
 * the interface lists. Hashing the full list let two "equal" areas produce
 * different hash codes, violating the hashCode contract.
 */
@Override
public int hashCode() {
    return Objects.hashCode(areaId, routerId, externalRoutingCapability,
                            ospfInterfaceList == null ? 0 : ospfInterfaceList.size(),
                            database);
}
/**
 * Gets the router id.
 *
 * @return router id
 */
public Ip4Address routerId() {
    return routerId;
}
/**
 * Sets the router id.
 *
 * @param routerId router id
 */
@JsonProperty("routerId")
public void setRouterId(Ip4Address routerId) {
    this.routerId = routerId;
}
/**
 * Sets whether opaque capability is enabled.
 *
 * @param isOpaqueEnable true if opaque enabled else false
 */
@JsonProperty("isOpaqueEnable")
public void setIsOpaqueEnabled(boolean isOpaqueEnable) {
    this.isOpaqueEnable = isOpaqueEnable;
}
/**
 * Returns whether opaque capability is enabled.
 *
 * @return true if opaque enabled else false
 */
public boolean isOpaqueEnabled() {
    return this.isOpaqueEnable;
}
/**
 * Initializes the link state database by delegating to this area's LSDB.
 */
public void initializeDb() {
    database.initializeDb();
}
/**
 * Refreshes the OSPF area information.
 * Gets called as soon as the interface is down or neighbor full Router LSA is updated.
 *
 * <p>If the interface is the designated router and has neighbors, a fresh
 * network-LSA is built and stored; in all cases a fresh router-LSA is built
 * and stored, and both are queued for transmission to other neighbors.
 *
 * NOTE(review): if buildNetworkLsa/buildRouterLsa throw, the error is only
 * logged and a null LSA is still passed to database.addLsa — confirm addLsa
 * tolerates null.
 *
 * @param ospfInterface OSPF interface instance
 */
@Override
public void refreshArea(OspfInterface ospfInterface) {
    OspfInterfaceImpl ospfInterfaceImpl = (OspfInterfaceImpl) ospfInterface;
    log.debug("Inside refreshArea...!!!");
    //If interface state is DR build network LSA.
    if (ospfInterfaceImpl.state() == OspfInterfaceState.DR) {
        if (ospfInterface.listOfNeighbors().size() > 0) {
            //Get the NetworkLsa
            NetworkLsa networkLsa = null;
            try {
                networkLsa = buildNetworkLsa(ospfInterface.ipAddress(), ospfInterface.ipNetworkMask());
            } catch (Exception e) {
                log.debug("Error while building NetworkLsa {}", e.getMessage());
            }
            //Add the NetworkLsa to lsdb
            database.addLsa(networkLsa, true, ospfInterface);
            addToOtherNeighborLsaTxList(networkLsa);
        } else {
            log.debug("No Neighbors hence not creating NetworkLSA...!!!");
        }
    }
    //Get the router LSA
    RouterLsa routerLsa = null;
    try {
        routerLsa = buildRouterLsa(ospfInterface);
    } catch (Exception e) {
        log.debug("Error while building RouterLsa {}", e.getMessage());
    }
    //Add the RouterLSA to lsdb
    database.addLsa(routerLsa, true, ospfInterface);
    addToOtherNeighborLsaTxList(routerLsa);
}
/**
 * Builds a network LSA for the given interface: the LSA advertises this
 * router plus every neighbor in FULL state on that interface, then stamps
 * the sequence number, packet length and checksum.
 *
 * <p>Fix: the interface lookup previously left the loop variable pointing at
 * the <em>last</em> interface when no interface matched {@code interfaceIp},
 * so the subsequent null check passed spuriously and neighbors of the wrong
 * interface were attached. The lookup now only assigns on an actual match.
 *
 * @param interfaceIp interface IP address
 * @param mask interface network mask
 * @return NetworkLsa instance
 * @throws Exception might throws exception
 */
public NetworkLsa buildNetworkLsa(Ip4Address interfaceIp, Ip4Address mask) throws Exception {
    // generate the Network-LSA for this Area.
    NetworkLsa networkLsa = new NetworkLsa();
    networkLsa.setAdvertisingRouter(routerId);
    networkLsa.setLinkStateId(interfaceIp.toString());
    networkLsa.setLsType(OspfLsaType.NETWORK.value());
    networkLsa.setAge(1);
    networkLsa.setOptions(2);
    networkLsa.setNetworkMask(mask);
    //Adding our own router.
    networkLsa.addAttachedRouter(routerId());
    //Find the interface that owns interfaceIp; only assign on a real match.
    OspfInterfaceImpl matchedInterface = null;
    Iterator iter = ospfInterfaceList.iterator();
    while (iter.hasNext()) {
        OspfInterfaceImpl candidate = (OspfInterfaceImpl) iter.next();
        if (candidate.ipAddress().equals(interfaceIp)) {
            matchedInterface = candidate;
            break;
        }
    }
    if (matchedInterface != null) {
        List<OspfNbr> neighborsInFullState = getNeighborsInFullState(matchedInterface);
        if (neighborsInFullState != null) {
            for (OspfNbr ospfnbr : neighborsInFullState) {
                networkLsa.addAttachedRouter(ospfnbr.neighborId());
                log.debug("Adding attached neighbor:: {}", ospfnbr.neighborId());
            }
        }
    }
    networkLsa.setLsSequenceNo(database.getLsSequenceNumber(OspfLsaType.NETWORK));
    //Find the byte length and add it in lsa object
    ChecksumCalculator checksum = new ChecksumCalculator();
    byte[] lsaBytes = networkLsa.asBytes();
    networkLsa.setLsPacketLen(lsaBytes.length);
    //Convert lsa object to byte again to reflect the packet length which we added.
    lsaBytes = networkLsa.asBytes();
    //find the checksum
    byte[] twoByteChecksum = checksum.calculateLsaChecksum(lsaBytes,
                                                           OspfUtil.LSAPACKET_CHECKSUM_POS1,
                                                           OspfUtil.LSAPACKET_CHECKSUM_POS2);
    int checkSumVal = OspfUtil.byteToInteger(twoByteChecksum);
    networkLsa.setLsCheckSum(checkSumVal);
    return networkLsa;
}
/**
 * Builds Router LSA: sets header fields, attaches one link per eligible
 * interface via buildLinkForRouterLsa, then stamps sequence number, packet
 * length and checksum.
 *
 * NOTE(review): the ospfInterface parameter is forwarded to
 * buildLinkForRouterLsa, which iterates all interfaces instead — confirm the
 * parameter is still needed.
 *
 * @param ospfInterface Interface instance
 * @return routerLsa Router LSA instance
 * @throws Exception might throws exception
 */
public RouterLsa buildRouterLsa(OspfInterface ospfInterface) throws Exception {
    // generate the Router-LSA for this Area.
    RouterLsa routerLsa = new RouterLsa();
    routerLsa.setAdvertisingRouter(routerId);
    routerLsa.setLinkStateId(routerId.toString());
    routerLsa.setLsType(OspfLsaType.ROUTER.value());
    routerLsa.setAge(1);
    routerLsa.setOptions(options);
    routerLsa.setAreaBorderRouter(false);
    routerLsa.setAsBoundaryRouter(false);
    routerLsa.setVirtualEndPoint(false);
    buildLinkForRouterLsa(routerLsa, ospfInterface);
    routerLsa.setLsSequenceNo(database.getLsSequenceNumber(OspfLsaType.ROUTER));
    //Find the byte length and add it in lsa object
    ChecksumCalculator checksum = new ChecksumCalculator();
    byte[] lsaBytes = routerLsa.asBytes();
    routerLsa.setLsPacketLen(lsaBytes.length);
    //Convert lsa object to byte again to reflect the packet length which we added.
    lsaBytes = routerLsa.asBytes();
    //find the checksum
    byte[] twoByteChecksum = checksum.calculateLsaChecksum(lsaBytes,
                                                           OspfUtil.LSAPACKET_CHECKSUM_POS1,
                                                           OspfUtil.LSAPACKET_CHECKSUM_POS2);
    int checkSumVal = OspfUtil.byteToInteger(twoByteChecksum);
    routerLsa.setLsCheckSum(checkSumVal);
    return routerLsa;
}
/**
 * Builds LSA links for the router LSA by walking every interface of this
 * area: DOWN interfaces are skipped; LOOPBACK interfaces contribute a type-3
 * link; POINT2POINT interfaces contribute a type-1 link per FULL neighbor
 * plus a type-3 link for the local network; all other states are handled by
 * buildLinkForRouterLsaBroadcast. (Link types per RFC 2328: 1 = point-to-point,
 * 2 = transit, 3 = stub.)
 *
 * @param routerLsa router LSA instance
 * @param ospfInterface interface instance
 */
private void buildLinkForRouterLsa(RouterLsa routerLsa, OspfInterface ospfInterface) {
    OspfInterfaceImpl nextInterface;
    Iterator interfaces = ospfInterfaceList.iterator();
    while (interfaces.hasNext()) {
        nextInterface = (OspfInterfaceImpl) interfaces.next();
        if (nextInterface.state() == OspfInterfaceState.DOWN) {
            continue;
        } else if (nextInterface.state() == OspfInterfaceState.LOOPBACK) {
            OspfLsaLink link = new OspfLsaLink();
            link.setLinkData("-1");
            link.setLinkId(nextInterface.ipAddress().toString());
            link.setLinkType(3);
            link.setMetric(0);
            link.setTos(0);
            routerLsa.addRouterLink(link);
            routerLsa.incrementLinkNo();
        } else if (nextInterface.state() == OspfInterfaceState.POINT2POINT) {
            // adding all neighbour routers (only those in FULL state)
            List<OspfNbr> neighborsInFullState = getNeighborsInFullState(nextInterface);
            if (neighborsInFullState != null) {
                log.debug("Adding OspfLsaLink ::neighborsInFullState {}, InterfaceIP: {}",
                          neighborsInFullState.size(), nextInterface.ipAddress());
                for (OspfNbr ospfnbr : neighborsInFullState) {
                    OspfLsaLink link = new OspfLsaLink();
                    link.setLinkData(nextInterface.ipAddress().toString());
                    link.setLinkId(ospfnbr.neighborId().toString());
                    link.setLinkType(1);
                    link.setMetric(0);
                    link.setTos(0);
                    routerLsa.addRouterLink(link);
                    routerLsa.incrementLinkNo();
                    log.debug("Added OspfLsaLink :: {}, neighborIP: {}, routerLinks: {}",
                              ospfnbr.neighborId(), ospfnbr.neighborIpAddr(), routerLsa.noLink());
                }
            }
            // adding the self address as a stub link
            OspfLsaLink link = new OspfLsaLink();
            link.setLinkData(nextInterface.ipNetworkMask().toString());
            link.setLinkId(nextInterface.ipAddress().toString());
            link.setLinkType(3);
            link.setMetric(0);
            link.setTos(0);
            routerLsa.addRouterLink(link);
            routerLsa.incrementLinkNo();
        } else {
            buildLinkForRouterLsaBroadcast(routerLsa, nextInterface);
        }
    }
}
/**
 * Builds the Router-LSA link contributed by a broadcast interface, based on the
 * interface state. Interfaces in other states add no link.
 *
 * @param routerLsa router LSA instance being populated
 * @param ospfInterface interface instance the link is derived from
 */
private void buildLinkForRouterLsaBroadcast(RouterLsa routerLsa, OspfInterface ospfInterface) {
    OspfInterfaceImpl ospfInterfaceImpl = (OspfInterfaceImpl) ospfInterface;
    String linkId;
    String linkData;
    int linkType;
    if (ospfInterfaceImpl.state() == OspfInterfaceState.WAITING) {
        // Stub link: link id is the ip network number, link data the mask.
        linkId = ospfInterface.ipAddress().toString();
        linkData = ospfInterface.ipNetworkMask().toString();
        linkType = 3;
    } else if (ospfInterfaceImpl.state() == OspfInterfaceState.DR) {
        // This router is the DR: transit link pointing at its own address.
        linkId = ospfInterface.ipAddress().toString();
        linkData = ospfInterface.ipAddress().toString();
        linkType = 2;
    } else if (ospfInterfaceImpl.state() == OspfInterfaceState.BDR
            || ospfInterfaceImpl.state() == OspfInterfaceState.DROTHER) {
        // Transit link pointing at the elected DR.
        linkId = ospfInterface.dr().toString();
        linkData = ospfInterface.ipAddress().toString();
        linkType = 2;
    } else {
        return;
    }
    OspfLsaLink link = new OspfLsaLink();
    link.setLinkData(linkData);
    link.setLinkId(linkId);
    link.setLinkType(linkType);
    link.setMetric(0);
    link.setTos(0);
    routerLsa.addRouterLink(link);
    routerLsa.incrementLinkNo();
}
/**
 * Gets the id of this OSPF area.
 *
 * @return area id
 */
public Ip4Address areaId() {
    return areaId;
}
/**
 * Sets the area id (bound from configuration via Jackson).
 *
 * @param areaId area id
 */
@JsonProperty("areaId")
public void setAreaId(Ip4Address areaId) {
    this.areaId = areaId;
}
/**
 * Gets external routing capability.
 *
 * @return true if external routing capable, else false
 */
public boolean isExternalRoutingCapability() {
    return externalRoutingCapability;
}
/**
 * Sets external routing capability (bound from configuration via Jackson).
 *
 * @param externalRoutingCapability true if external routing capable, else false
 */
@JsonProperty("externalRoutingCapability")
public void setExternalRoutingCapability(boolean externalRoutingCapability) {
    this.externalRoutingCapability = externalRoutingCapability;
}
/**
 * Gets the list of interfaces attached to this area.
 * Note: the internal list is returned directly, not a defensive copy.
 *
 * @return list of interfaces
 */
public List<OspfInterface> ospfInterfaceList() {
    return ospfInterfaceList;
}
/**
 * Sets the list of interfaces attached to the area (bound from the "interface"
 * configuration property via Jackson).
 *
 * @param ospfInterfaceList list of OspfInterface instances
 */
@JsonProperty("interface")
public void setOspfInterfaceList(List<OspfInterface> ospfInterfaceList) {
    this.ospfInterfaceList = ospfInterfaceList;
}
/**
 * Checks all neighbors belonging to this area for an ongoing database exchange,
 * i.e. a neighbor in state EXCHANGE or LOADING. This method is used by
 * "processReceivedLsa()" in the neighbor class.
 *
 * @return true if no neighbor is in database exchange, false otherwise
 */
public boolean noNeighborInLsaExchangeProcess() {
    for (OspfInterface anInterface : ospfInterfaceList) {
        OspfInterfaceImpl nextInterface = (OspfInterfaceImpl) anInterface;
        for (Object neighbor : nextInterface.listOfNeighbors().values()) {
            OspfNeighborState nextNeighborState = ((OspfNbrImpl) neighbor).getState();
            if (nextNeighborState == OspfNeighborState.EXCHANGE ||
                    nextNeighborState == OspfNeighborState.LOADING) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Gets headers of all types of LSAs present in the LSDB.
 *
 * @param excludeMaxAgeLsa whether max-age LSAs are filtered. NOTE(review): the
 *                         original doc said include(true)/exclude(false), which
 *                         contradicts the parameter name — confirm against the
 *                         LSDB implementation before relying on either reading.
 * @param isOpaquecapable need to include(true) or exclude(false) Type 10 Opaque lsa's
 * @return list of lsa headers in the lsdb
 */
public List getLsaHeaders(boolean excludeMaxAgeLsa, boolean isOpaquecapable) {
    return database.getAllLsaHeaders(excludeMaxAgeLsa, isOpaquecapable);
}
/**
 * Gets the LSA from the LSDB using a key built from the given fields.
 * For opaque LSA types the link state id encodes one opaque-type byte followed
 * by a three-byte opaque id, and the key is built from those parts instead.
 *
 * @param lsType type of lsa to form the key
 * @param linkStateID link state id to form the key
 * @param advertisingRouter advertising router to form the key
 * @return lsa wrapper instance which contains the Lsa
 * @throws Exception if the link state id cannot be resolved to an address
 */
public LsaWrapper getLsa(int lsType, String linkStateID, String advertisingRouter) throws Exception {
    String lsaKey = lsType + "-" + linkStateID + "-" + advertisingRouter;
    if (lsType == OspfParameters.LINK_LOCAL_OPAQUE_LSA || lsType == OspfParameters.AREA_LOCAL_OPAQUE_LSA ||
            lsType == OspfParameters.AS_OPAQUE_LSA) {
        byte[] linkStateAsBytes = InetAddress.getByName(linkStateID).getAddress();
        // NOTE(review): this assignment sign-extends, so opaque types > 127 become
        // negative in the key. It must stay consistent with how the LSDB side builds
        // its keys — confirm both sides before changing to (linkStateAsBytes[0] & 0xFF).
        int opaqueType = linkStateAsBytes[0];
        int opaqueId = OspfUtil.byteToInteger(Arrays.copyOfRange(linkStateAsBytes, 1,
                linkStateAsBytes.length));
        lsaKey = lsType + "-" + opaqueType + opaqueId + "-" + advertisingRouter;
    }
    return database.findLsa(lsType, lsaKey);
}
/**
 * Looks up the given LSA in the database belonging to this area.
 *
 * @param lookupLsa ospf LSA instance to look up
 * @return LSA wrapper instance which contains the Lsa
 */
public LsaWrapper lsaLookup(OspfLsa lookupLsa) {
    return database.lsaLookup((LsaHeader) lookupLsa);
}
/**
 * Compares two LSA instances by freshness, delegating to the LSDB.
 *
 * @param lsa1 OSPF LSA instance to compare
 * @param lsa2 OSPF LSA instance to compare
 * @return "same" if both instances are same, "latest" if lsa1 is latest, or "old" if lsa1 is old
 */
public String isNewerOrSameLsa(OspfLsa lsa1, OspfLsa lsa2) {
    return database.isNewerOrSameLsa((LsaHeader) lsa1, (LsaHeader) lsa2);
}
/**
 * Adds an LSA received from the network to the LSDB; called from the channel handler.
 *
 * @param ospfLsa OSPF LSA instance
 * @param ospfInterface OSPF interface instance
 * @throws Exception on error
 */
public void addLsa(OspfLsa ospfLsa, OspfInterface ospfInterface) throws Exception {
    // second param is false as the lsa came from the network (not self originated)
    database.addLsa((LsaHeader) ospfLsa, false, ospfInterface);
}
/**
 * Adds an LSA to the LSDB, stating explicitly whether it is self originated;
 * called from the channel handler.
 *
 * @param ospfLsa OSPF LSA instance
 * @param isSelfOriginated true if the LSA is self originated. Else false
 * @param ospfInterface OSPF interface instance
 * @throws Exception on error
 */
public void addLsa(OspfLsa ospfLsa, boolean isSelfOriginated, OspfInterface ospfInterface)
        throws Exception {
    database.addLsa((LsaHeader) ospfLsa, isSelfOriginated, ospfInterface);
}
/**
 * Adds the LSA to the LSDB's max-age bin.
 *
 * @param key key to add it to LSDB
 * @param wrapper LSA wrapper instance
 */
public void addLsaToMaxAgeBin(String key, LsaWrapper wrapper) {
    database.addLsaToMaxAgeBin(key, wrapper);
}
/**
 * Sets the router-LSA sequence number in the LSDB.
 *
 * @param newSequenceNumber sequence number
 */
public void setDbRouterSequenceNumber(long newSequenceNumber) {
    database.setRouterLsaSeqNo(newSequenceNumber);
}
/**
 * Deletes the given LSA from the LSDB; called from the channel handler.
 *
 * @param ospfLsa the LSA instance to delete
 */
public void deleteLsa(LsaHeader ospfLsa) {
    database.deleteLsa(ospfLsa);
}
/**
 * Removes the given LSA wrapper from its LSDB bin.
 *
 * @param lsaWrapper the LSA wrapper instance to delete
 */
public void removeLsaFromBin(LsaWrapper lsaWrapper) {
    database.removeLsaFromBin(lsaWrapper);
}
/** Renders the area id, interfaces and routing capability; null values are omitted. */
@Override
public String toString() {
    return MoreObjects.toStringHelper(getClass())
            .omitNullValues()
            .add("areaID", areaId)
            .add("ospfInterfaceList", ospfInterfaceList)
            .add("externalRoutingCapability", externalRoutingCapability)
            .toString();
}
/**
 * Collects all neighbors of the given interface that are in state FULL.
 * (The previous javadoc incorrectly described "state lesser than EXCHANGE";
 * the comparison below selects FULL-state neighbors only.)
 *
 * @param ospfInterface OSPF interface instance
 * @return list of FULL-state neighbors, or null if there are none (callers check for null)
 */
public List<OspfNbr> getNeighborsInFullState(OspfInterface ospfInterface) {
    List<OspfNbr> listEligibleNeighbors = null;
    for (Object neighbor : ospfInterface.listOfNeighbors().values()) {
        OspfNbrImpl ospfNeighbor = (OspfNbrImpl) neighbor;
        if (ospfNeighbor.getState().getValue() == OspfNeighborState.FULL.getValue()) {
            // Lazily created so the null return (meaning "none") is preserved.
            if (listEligibleNeighbors == null) {
                listEligibleNeighbors = new ArrayList<OspfNbr>();
            }
            listEligibleNeighbors.add(ospfNeighbor);
        }
    }
    return listEligibleNeighbors;
}
/**
 * Gets the LSDB lookup key for the given LSA header.
 *
 * @param lsaHeader LSA header instance
 * @return key LSA key
 */
public String getLsaKey(LsaHeader lsaHeader) {
    return database.getLsaKey(lsaHeader);
}
/**
 * Adds a received LSA to the retransmission list of every eligible neighbor in
 * this area. Neighbors below EXCHANGE, the LSA's own advertising router, and
 * non-opaque-capable neighbors (for opaque LSAs) are skipped; stale request-list
 * entries for the same LSA are cleaned up along the way.
 *
 * @param recLsa LSA Header instance
 */
public void addToOtherNeighborLsaTxList(LsaHeader recLsa) {
    //Add the received LSA in other neighbors retransmission list.
    log.debug("OspfAreaImpl: addToOtherNeighborLsaTxList");
    // The LSDB key depends only on the received LSA, so compute it once instead
    // of once per neighbor (it was previously recomputed inside the inner loop).
    String key = database.getLsaKey(recLsa);
    List<OspfInterface> ospfInterfaces = ospfInterfaceList();
    for (OspfInterface ospfInterfaceFromArea : ospfInterfaces) {
        Map neighbors = ospfInterfaceFromArea.listOfNeighbors();
        for (Object neighborIP : neighbors.keySet()) {
            OspfNbrImpl nbr = (OspfNbrImpl) neighbors.get(neighborIP);
            if (nbr.getState().getValue() < OspfNeighborState.EXCHANGE.getValue()) {
                // Neighbors below EXCHANGE never take part in flooding.
                continue;
            }
            if (nbr.getState() == OspfNeighborState.EXCHANGE || nbr.getState() == OspfNeighborState.LOADING) {
                if (nbr.getLsReqList().containsKey(key)) {
                    LsaWrapper lsWrapper = lsaLookup(recLsa);
                    if (lsWrapper != null) {
                        LsaHeader ownLsa = (LsaHeader) lsWrapper.ospfLsa();
                        String status = isNewerOrSameLsa(recLsa, ownLsa);
                        if (status.equals("old")) {
                            // Our requested copy is newer; skip this neighbor entirely.
                            continue;
                        } else if (status.equals("same")) {
                            log.debug("OspfAreaImpl: addToOtherNeighborLsaTxList: " +
                                    "Removing lsa from reTxtList {}", key);
                            nbr.getLsReqList().remove(key);
                            continue;
                        } else {
                            // Received copy is newer: drop the request but still retransmit below.
                            log.debug("OspfAreaImpl: addToOtherNeighborLsaTxList: " +
                                    "Removing lsa from reTxtList {}", key);
                            nbr.getLsReqList().remove(key);
                        }
                    }
                }
            }
            if (recLsa.advertisingRouter().toString().equals((String) neighborIP)) {
                // Never flood an LSA back to its originator.
                continue;
            }
            if ((recLsa.lsType() == OspfParameters.LINK_LOCAL_OPAQUE_LSA ||
                    recLsa.lsType() == OspfParameters.AREA_LOCAL_OPAQUE_LSA)) {
                if (nbr.isOpaqueCapable()) {
                    log.debug("OspfAreaImpl: addToOtherNeighborLsaTxList: Adding lsa to reTxtList {}",
                            recLsa);
                    nbr.getReTxList().put(key, recLsa);
                }
            } else {
                log.debug("OspfAreaImpl: addToOtherNeighborLsaTxList: Adding lsa to reTxtList {}",
                        recLsa);
                nbr.getReTxList().put(key, recLsa);
            }
        }
    }
}
/**
 * Gets the OSPF options value advertised by this area.
 *
 * @return options value
 */
public int options() {
    return options;
}
/**
 * Sets the OSPF options value advertised by this area.
 *
 * @param options options value
 */
public void setOptions(int options) {
    this.options = options;
}
/**
 * Gets the opaque enabled options value.
 *
 * @return opaque enabled options value
 */
public int opaqueEnabledOptions() {
    // OPAQUE_ENABLED_OPTION_VALUE is a base-2 (binary) string constant; it is
    // re-parsed on every call.
    return Integer.parseInt(OspfParameters.OPAQUE_ENABLED_OPTION_VALUE, 2);
}
/**
 * Gets the LSDB instance for this area.
 *
 * @return lsdb instance
 */
public OspfLsdb database() {
    return database;
}
}
| apache-2.0 |
ryansgot/forsuredbcompiler | dbinfo/src/main/java/com/fsryan/forsuredb/info/TableForeignKeyInfo.java | 2481 | /*
forsuredb-dbinfo, value classes for the forsuredb project
Copyright 2017 Ryan Scott
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.fsryan.forsuredb.info;
import com.google.auto.value.AutoValue;
import javax.annotation.Nullable;
import java.util.Map;
/**
 * <p>
 * An alternative means of serializing foreign keys on the table. The old way,
 * {@link ForeignKeyInfo}, gets serialized on columns. The old way was inflexible
 * because it did not allow for composite keys--at least not in any sensible way.
 * </p>
 * <p>
 * Instances are immutable; the implementation is generated by AutoValue.
 * The trailing comments name the serialized field each accessor maps to.
 * </p>
 */
@AutoValue
public abstract class TableForeignKeyInfo {
    /** Creates a builder whose foreign table name defaults to the empty string. */
    public static Builder builder() {
        return new AutoValue_TableForeignKeyInfo.Builder()
                .foreignTableName("");
    }
    // not known until later on . . . so hack to set this value on an otherwise immutable class
    public abstract String foreignTableApiClassName();                  // foreign_table_api_class_name
    public abstract String foreignTableName();                          // foreign_table_name
    public abstract Map<String, String> localToForeignColumnMap();      // local_to_foreign_column_map
    public abstract String updateChangeAction();                        // update_action
    public abstract String deleteChangeAction();                        // delete_action
    /** Returns a builder initialized from this instance (the "set later" hack above). */
    public abstract Builder toBuilder();
    /** AutoValue builder for {@link TableForeignKeyInfo}. */
    @AutoValue.Builder
    public static abstract class Builder {
        public abstract Builder foreignTableApiClassName(String foreignTableApiClassName);          // foreign_table_api_class_name
        public abstract Builder foreignTableName(String foreignTableName);                          // foreign_table_name
        public abstract Builder localToForeignColumnMap(Map<String, String> localToForeignColumnMap);   // local_to_foreign_column_map
        public abstract Builder updateChangeAction(String updateAction);                            // update_action
        public abstract Builder deleteChangeAction(String deleteAction);                            // delete_action
        public abstract TableForeignKeyInfo build();
    }
}
| apache-2.0 |
genome-vendor/libcommons-jexl2-java | src/test/java/org/apache/commons/jexl2/CacheTest.java | 23178 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jexl2;
import java.util.List;
import java.util.Map;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
/**
* Verifies cache & tryExecute
*/
public class CacheTest extends JexlTestCase {
/** Creates a named JUnit3 test case. @param testName the test name */
public CacheTest(String testName) {
    super(testName);
}
// Shared engine under test; the cache size and non-silent error mode are set
// once for the whole test class.
private static final JexlEngine jexl = createEngine(false);
static {
    jexl.setCache(512);
    jexl.setSilent(false);
}
/** Quiets java.util.logging before each test. */
@Override
public void setUp() throws Exception {
    // ensure jul logging is only error to avoid warning in silent mode
    java.util.logging.Logger.getLogger(JexlEngine.class.getName()).setLevel(java.util.logging.Level.SEVERE);
}
// LOOPS & THREADS: iterations per task and parallel thread count.
private static final int LOOPS = 4096;
private static final int NTHREADS = 4;
// A pseudo random mix of accessors; values index into TestCacheArguments.ca.
private static final int[] MIX = {
    0, 0, 3, 3, 4, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 1, 1, 1, 2, 2, 2,
    3, 3, 3, 4, 4, 4, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 2, 2, 3, 3, 0
};
/** Verifies the engine's debugger state after each test. */
@Override
protected void tearDown() throws Exception {
    debuggerCheck(jexl);
}
/**
 * A set of classes that define different getter/setter methods for the same properties.
 * The goal is to verify that the cached JexlPropertyGet / JexlPropertySet in the AST Nodes are indeed
 * volatile and do not generate errors even when multiple threads concurently hammer them.
 */
public static class Cached {
    /** Replaces a null string argument with the "na" placeholder (shared by the overloads below). */
    private static String nvl(String arg) {
        return arg == null ? "na" : arg;
    }
    public String compute(String arg) {
        return getClass().getSimpleName() + "@s#" + nvl(arg);
    }
    public String compute(String arg0, String arg1) {
        return getClass().getSimpleName() + "@s#" + nvl(arg0) + ",s#" + nvl(arg1);
    }
    public String compute(Integer arg) {
        return getClass().getSimpleName() + "@i#" + arg;
    }
    public String compute(float arg) {
        return getClass().getSimpleName() + "@f#" + arg;
    }
    public String compute(int arg0, int arg1) {
        return getClass().getSimpleName() + "@i#" + arg0 + ",i#" + arg1;
    }
    // The two 'ambiguous' overloads are deliberately unresolvable when called with
    // two boxable integral arguments; tests expect a JexlException.
    public String ambiguous(Integer arg0, int arg1) {
        return getClass().getSimpleName() + "!i#" + arg0 + ",i#" + arg1;
    }
    public String ambiguous(int arg0, Integer arg1) {
        return getClass().getSimpleName() + "!i#" + arg0 + ",i#" + arg1;
    }
    public static String COMPUTE(String arg) {
        return "CACHED@s#" + nvl(arg);
    }
    public static String COMPUTE(String arg0, String arg1) {
        return "CACHED@s#" + nvl(arg0) + ",s#" + nvl(arg1);
    }
    public static String COMPUTE(int arg) {
        return "CACHED@i#" + arg;
    }
    public static String COMPUTE(int arg0, int arg1) {
        return "CACHED@i#" + arg0 + ",i#" + arg1;
    }
}
/** Exposes 'value' and 'flag' through conventional JavaBean getters/setters. */
public static class Cached0 extends Cached {
    protected String value = "Cached0:new";
    protected Boolean flag = Boolean.FALSE;
    public Cached0() {
    }
    public String getValue() {
        return value;
    }
    public void setValue(String arg) {
        // Null arguments collapse to the "na" placeholder before prefixing.
        value = "Cached0:" + (arg == null ? "na" : arg);
    }
    public void setFlag(boolean b) {
        flag = Boolean.valueOf(b);
    }
    public boolean isFlag() {
        return flag.booleanValue();
    }
}
/** Same bean shape as Cached0 but tags values with its own class-name prefix. */
public static class Cached1 extends Cached0 {
    @Override
    public void setValue(String arg) {
        value = "Cached1:" + (arg == null ? "na" : arg);
    }
}
/** Exposes 'value' and 'flag' through duck-typed get(String)/set(String, Object). */
public static class Cached2 extends Cached {
    boolean flag = false;
    protected String value;
    public Cached2() {
        value = "Cached2:new";
    }
    public Object get(String prop) {
        if ("value".equals(prop)) {
            return value;
        }
        if ("flag".equals(prop)) {
            return Boolean.valueOf(flag);
        }
        throw new RuntimeException("no such property");
    }
    public void set(String p, Object v) {
        // Null values collapse to the "na" placeholder, like the bean variants.
        Object safe = (v == null) ? "na" : v;
        if ("value".equals(p)) {
            value = getClass().getSimpleName() + ":" + safe;
            return;
        }
        if ("flag".equals(p)) {
            flag = Boolean.parseBoolean(safe.toString());
            return;
        }
        throw new RuntimeException("no such property");
    }
}
/** Exposes properties through the Map interface, with a bean-style 'flag' on the side. */
public static class Cached3 extends java.util.TreeMap<String, Object> {
    private static final long serialVersionUID = 1L;
    boolean flag = false;
    public Cached3() {
        // These go through the overridden put below, so the stored initial
        // value is actually "Cached3:Cached3:new" / "Cached3:false".
        put("value", "Cached3:new");
        put("flag", "false");
    }
    @Override
    public Object get(Object key) {
        return super.get(key.toString());
    }
    @Override
    public Object put(String key, Object arg) {
        // Every stored value is class-name prefixed; null collapses to "na".
        Object tagged = "Cached3:" + (arg == null ? "na" : arg);
        return super.put(key, tagged);
    }
    public void setflag(boolean b) {
        flag = b;
    }
    public boolean isflag() {
        return flag;
    }
}
/** Exposes properties through list slots: index 0 is the value, index 1 the flag. */
public static class Cached4 extends java.util.ArrayList<String> {
    private static final long serialVersionUID = 1L;
    public Cached4() {
        add("Cached4:new");
        add("false");
    }
    public String getValue() {
        return get(0);
    }
    public void setValue(String arg) {
        set(0, "Cached4:" + (arg == null ? "na" : arg));
    }
    public void setflag(Boolean b) {
        set(1, b.toString());
    }
    public boolean isflag() {
        return Boolean.parseBoolean(get(1));
    }
}
/**
 * A helper class to pass arguments in tests (instances of getter/setter exercising classes).
 */
static class TestCacheArguments {
    Cached0 c0 = new Cached0();
    Cached1 c1 = new Cached1();
    Cached2 c2 = new Cached2();
    Cached3 c3 = new Cached3();
    Cached4 c4 = new Cached4();
    // Targets indexed by the MIX array; tasks may replace this with a subset.
    Object[] ca = {
        c0, c1, c2, c3, c4
    };
    // Values to assign/compute with; set by each task before running.
    Object[] value = null;
}
/**
 * Runs the same test function in NTHREADS threads in parallel.
 *
 * @param ctask the task / test class, instantiated once per thread
 * @param loops number of loops to perform (0 means MIX.length)
 * @param cache whether jexl cache is used or not
 * @throws Exception if anything goes wrong
 */
@SuppressWarnings("boxing")
void runThreaded(Class<? extends Task> ctask, int loops, boolean cache) throws Exception {
    if (loops == 0) {
        loops = MIX.length;
    }
    if (cache) {
        jexl.setCache(32);
    } else {
        jexl.setCache(0);
    }
    java.util.concurrent.ExecutorService execs = java.util.concurrent.Executors.newFixedThreadPool(NTHREADS);
    try {
        List<Callable<Integer>> tasks = new ArrayList<Callable<Integer>>(NTHREADS);
        for (int t = 0; t < NTHREADS; ++t) {
            tasks.add(jexl.newInstance(ctask, loops));
        }
        // let's not wait for more than a minute
        List<Future<Integer>> futures = execs.invokeAll(tasks, 60, TimeUnit.SECONDS);
        // check that all returned loops
        for (Future<Integer> future : futures) {
            assertEquals(Integer.valueOf(loops), future.get());
        }
    } finally {
        // Previously the pool was never shut down, leaking its worker threads
        // across test runs.
        execs.shutdown();
    }
}
/**
 * The base class for MT tests: holds the per-thread variables/context and the
 * shared assign-then-read verification loop.
 */
public abstract static class Task implements Callable<Integer> {
    // Fresh accessor instances per task so threads never share mutable targets.
    final TestCacheArguments args = new TestCacheArguments();
    final int loops;
    final Map<String, Object> vars = new HashMap<String, Object>();
    final JexlContext jc = new MapContext(vars);
    Task(int loops) {
        this.loops = loops;
    }
    public abstract Integer call() throws Exception;
    /**
     * The actual test function; assigns and checks.
     * <p>The expression will be evaluated against different classes in parallel.
     * This verifies that neither the volatile cache in the AST nor the expression cache in the JEXL engine
     * induce errors.</p>
     * <p>
     * Using it as a micro benchmark, it shows creating expression as the dominating cost; the expression
     * cache takes care of this.
     * By moving the expression creations out of the main loop, it also shows that the volatile cache speeds
     * things up around 2x.
     * </p>
     * @param value the argument value to control
     * @return the number of loops performed
     */
    public Integer runAssign(Object value) {
        args.value = new Object[]{value};
        Object result;
        Expression cacheGetValue = jexl.createExpression("cache.value");
        Expression cacheSetValue = jexl.createExpression("cache.value = value");
        for (int l = 0; l < loops; ++l) {
            // Salt the accessor choice with the thread id so threads diverge.
            int px = (int) Thread.currentThread().getId();
            int mix = MIX[(l + px) % MIX.length];
            vars.put("cache", args.ca[mix]);
            vars.put("value", args.value[0]);
            result = cacheSetValue.evaluate(jc);
            if (args.value[0] == null) {
                assertNull(cacheSetValue.toString(), result);
            } else {
                assertEquals(cacheSetValue.toString(), args.value[0], result);
            }
            // Read back: every accessor class prefixes with "CachedN:" ("na" for null).
            result = cacheGetValue.evaluate(jc);
            if (args.value[0] == null) {
                assertEquals(cacheGetValue.toString(), "Cached" + mix + ":na", result);
            } else {
                assertEquals(cacheGetValue.toString(), "Cached" + mix + ":" + args.value[0], result);
            }
        }
        return Integer.valueOf(loops);
    }
}
/**
 * A task to check assignment of a non-null string.
 */
public static class AssignTask extends Task {
    public AssignTask(int loops) {
        super(loops);
    }
    @Override
    public Integer call() throws Exception {
        return runAssign("foo");
    }
}
/**
 * A task to check null assignment (setters must map null to the "na" placeholder).
 */
public static class AssignNullTask extends Task {
    public AssignNullTask(int loops) {
        super(loops);
    }
    @Override
    public Integer call() throws Exception {
        return runAssign(null);
    }
}
/**
 * A task to check boolean assignment through the 'flag' property.
 */
public static class AssignBooleanTask extends Task {
    public AssignBooleanTask(int loops) {
        super(loops);
    }
    @Override
    public Integer call() throws Exception {
        return runAssignBoolean(Boolean.TRUE);
    }
    /** The actual test function: sets and re-reads 'flag' on a rotating accessor mix. */
    private Integer runAssignBoolean(Boolean value) {
        args.value = new Object[]{value};
        Expression cacheGetValue = jexl.createExpression("cache.flag");
        Expression cacheSetValue = jexl.createExpression("cache.flag = value");
        Object result;
        for (int l = 0; l < loops; ++l) {
            // Salt the accessor choice with the thread id so threads diverge.
            int px = (int) Thread.currentThread().getId();
            int mix = MIX[(l + px) % MIX.length];
            vars.put("cache", args.ca[mix]);
            vars.put("value", args.value[0]);
            result = cacheSetValue.evaluate(jc);
            assertEquals(cacheSetValue.toString(), args.value[0], result);
            result = cacheGetValue.evaluate(jc);
            assertEquals(cacheGetValue.toString(), args.value[0], result);
        }
        return Integer.valueOf(loops);
    }
}
/**
 * A task to check indexed assignment on array and list targets.
 */
public static class AssignListTask extends Task {
    public AssignListTask(int loops) {
        super(loops);
    }
    @Override
    public Integer call() throws Exception {
        return runAssignList();
    }
    /** The actual test function: replaces the default targets with an array and a list. */
    private Integer runAssignList() {
        args.value = new Object[]{"foo"};
        java.util.ArrayList<String> c1 = new java.util.ArrayList<String>(2);
        c1.add("foo");
        c1.add("bar");
        args.ca = new Object[]{
            new String[]{"one", "two"},
            c1
        };
        // Note the two syntaxes: dotted index for read, bracket index for write.
        Expression cacheGetValue = jexl.createExpression("cache.0");
        Expression cacheSetValue = jexl.createExpression("cache[0] = value");
        Object result;
        for (int l = 0; l < loops; ++l) {
            int px = (int) Thread.currentThread().getId();
            int mix = MIX[(l + px) % MIX.length] % args.ca.length;
            vars.put("cache", args.ca[mix]);
            vars.put("value", args.value[0]);
            result = cacheSetValue.evaluate(jc);
            assertEquals(cacheSetValue.toString(), args.value[0], result);
            result = cacheGetValue.evaluate(jc);
            assertEquals(cacheGetValue.toString(), args.value[0], result);
        }
        return Integer.valueOf(loops);
    }
}
/** Concurrent null assignment with the expression cache disabled. */
public void testNullAssignNoCache() throws Exception {
    runThreaded(AssignNullTask.class, LOOPS, false);
}
/** Concurrent null assignment with the expression cache enabled. */
public void testNullAssignCache() throws Exception {
    runThreaded(AssignNullTask.class, LOOPS, true);
}
/** Concurrent string assignment with the expression cache disabled. */
public void testAssignNoCache() throws Exception {
    runThreaded(AssignTask.class, LOOPS, false);
}
/** Concurrent string assignment with the expression cache enabled. */
public void testAssignCache() throws Exception {
    runThreaded(AssignTask.class, LOOPS, true);
}
/** Concurrent boolean assignment with the expression cache disabled. */
public void testAssignBooleanNoCache() throws Exception {
    runThreaded(AssignBooleanTask.class, LOOPS, false);
}
/** Concurrent boolean assignment with the expression cache enabled. */
public void testAssignBooleanCache() throws Exception {
    runThreaded(AssignBooleanTask.class, LOOPS, true);
}
/** Concurrent indexed assignment with the expression cache disabled. */
public void testAssignListNoCache() throws Exception {
    runThreaded(AssignListTask.class, LOOPS, false);
}
/** Concurrent indexed assignment with the expression cache enabled. */
public void testAssignListCache() throws Exception {
    runThreaded(AssignListTask.class, LOOPS, true);
}
/**
 * A task to check method calls: overload resolution against rotating receiver
 * classes, deliberate ambiguity, and null-argument failure reporting.
 */
public static class ComputeTask extends Task {
    public ComputeTask(int loops) {
        super(loops);
    }
    @Override
    public Integer call() throws Exception {
        // Only the method-exposing receivers; the value array alternates types.
        args.ca = new Object[]{args.c0, args.c1, args.c2};
        args.value = new Object[]{new Integer(2), "quux"};
        //jexl.setDebug(true);
        Expression compute2 = jexl.createExpression("cache.compute(a0, a1)");
        Expression compute1 = jexl.createExpression("cache.compute(a0)");
        Expression compute1null = jexl.createExpression("cache.compute(a0)");
        Expression ambiguous = jexl.createExpression("cache.ambiguous(a0, a1)");
        //jexl.setDebug(false);
        Object result = null;
        String expected = null;
        for (int l = 0; l < loops; ++l) {
            int mix = MIX[l % MIX.length] % args.ca.length;
            Object value = args.value[l % args.value.length];
            vars.put("cache", args.ca[mix]);
            // Two-argument call: pick the overload family by the loop's value type.
            if (value instanceof String) {
                vars.put("a0", "S0");
                vars.put("a1", "S1");
                expected = "Cached" + mix + "@s#S0,s#S1";
            } else if (value instanceof Integer) {
                vars.put("a0", Integer.valueOf(7));
                vars.put("a1", Integer.valueOf(9));
                expected = "Cached" + mix + "@i#7,i#9";
            } else {
                fail("unexpected value type");
            }
            result = compute2.evaluate(jc);
            assertEquals(compute2.toString(), expected, result);
            // The 'ambiguous' overload pair must fail resolution for boxable shorts.
            if (value instanceof Integer) {
                try {
                    vars.put("a0", Short.valueOf((short) 17));
                    vars.put("a1", Short.valueOf((short) 19));
                    result = ambiguous.evaluate(jc);
                    fail("should have thrown an exception");
                } catch (JexlException xany) {
                    // throws due to ambiguous exception
                }
            }
            // One-argument call.
            if (value instanceof String) {
                vars.put("a0", "X0");
                expected = "Cached" + mix + "@s#X0";
            } else if (value instanceof Integer) {
                vars.put("a0", Integer.valueOf(5));
                expected = "Cached" + mix + "@i#5";
            } else {
                fail("unexpected value type");
            }
            result = compute1.evaluate(jc);
            assertEquals(compute1.toString(), expected, result);
            // A null argument must fail, and in debug mode the error message
            // carries the caller class name.
            try {
                vars.put("a0", null);
                result = compute1null.evaluate(jc);
                fail("should have thrown an exception");
            } catch (JexlException xany) {
                // throws due to ambiguous exception
                String sany = xany.getMessage();
                String tname = getClass().getName();
                if (!sany.startsWith(tname)) {
                    fail("debug mode should carry caller information, "
                            + sany + ", "
                            + tname);
                }
            }
        }
        return Integer.valueOf(loops);
    }
}
/** Concurrent method calls, debug mode on, expression cache disabled. */
public void testComputeNoCache() throws Exception {
    try {
        jexl.setDebug(true);
        runThreaded(ComputeTask.class, LOOPS, false);
    } finally {
        jexl.setDebug(false);
    }
}
/** Concurrent method calls, debug mode on, expression cache enabled. */
public void testComputeCache() throws Exception {
    try {
        jexl.setDebug(true);
        runThreaded(ComputeTask.class, LOOPS, true);
    } finally {
        jexl.setDebug(false);
    }
}
/**
 * Exercises the namespaced static COMPUTE functions (single-threaded).
 *
 * @param x the accessor classes/instances exposed under the "cached" namespace
 * @param loops number of loops to perform (0 means MIX.length)
 * @param cache whether the jexl expression cache is enabled
 * @throws Exception on evaluation failure
 */
void doCOMPUTE(TestCacheArguments x, int loops, boolean cache) throws Exception {
    if (loops == 0) {
        loops = MIX.length;
    }
    if (cache) {
        jexl.setCache(32);
    } else {
        jexl.setCache(0);
    }
    Map<String, Object> vars = new HashMap<String, Object>();
    JexlContext jc = new MapContext(vars);
    java.util.Map<String, Object> funcs = new java.util.HashMap<String, Object>();
    jexl.setFunctions(funcs);
    Expression compute2 = jexl.createExpression("cached:COMPUTE(a0, a1)");
    Expression compute1 = jexl.createExpression("cached:COMPUTE(a0)");
    Object result = null;
    String expected = null;
    for (int l = 0; l < loops; ++l) {
        int mix = MIX[l % MIX.length] % x.ca.length;
        Object value = x.value[l % x.value.length];
        // Rebind the namespace target each iteration to churn the method cache.
        funcs.put("cached", x.ca[mix]);
        if (value instanceof String) {
            vars.put("a0", "S0");
            vars.put("a1", "S1");
            expected = "CACHED@s#S0,s#S1";
        } else if (value instanceof Integer) {
            vars.put("a0", Integer.valueOf(7));
            vars.put("a1", Integer.valueOf(9));
            expected = "CACHED@i#7,i#9";
        } else {
            fail("unexpected value type");
        }
        result = compute2.evaluate(jc);
        assertEquals(compute2.toString(), expected, result);
        if (value instanceof String) {
            vars.put("a0", "X0");
            expected = "CACHED@s#X0";
        } else if (value instanceof Integer) {
            vars.put("a0", Integer.valueOf(5));
            expected = "CACHED@i#5";
        } else {
            fail("unexpected value type");
        }
        result = compute1.evaluate(jc);
        assertEquals(compute1.toString(), expected, result);
    }
}
/** Namespaced static calls against rotating classes, expression cache disabled. */
public void testCOMPUTENoCache() throws Exception {
    TestCacheArguments args = new TestCacheArguments();
    args.ca = new Object[]{
        Cached.class, Cached1.class, Cached2.class
    };
    args.value = new Object[]{new Integer(2), "quux"};
    doCOMPUTE(args, LOOPS, false);
}
/** Namespaced static calls against rotating classes, expression cache enabled. */
public void testCOMPUTECache() throws Exception {
    TestCacheArguments args = new TestCacheArguments();
    args.ca = new Object[]{
        Cached.class, Cached1.class, Cached2.class
    };
    args.value = new Object[]{new Integer(2), "quux"};
    doCOMPUTE(args, LOOPS, true);
}
}
| apache-2.0 |
etirelli/kie-wb-distributions | kie-wb-parent/kie-wb-monitoring-webapp/src/test/java/org/kie/workbench/client/KieWorkbenchEntryPointTest.java | 9162 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.client;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.dashbuilder.client.cms.screen.explorer.NavigationExplorerScreen;
import org.dashbuilder.client.navigation.NavigationManager;
import org.dashbuilder.client.navigation.event.NavTreeLoadedEvent;
import org.dashbuilder.client.navigation.impl.NavigationManagerImpl;
import org.dashbuilder.client.navigation.widget.editor.NavTreeEditor;
import org.dashbuilder.navigation.NavGroup;
import org.dashbuilder.navigation.NavItem;
import org.dashbuilder.navigation.NavTree;
import org.dashbuilder.navigation.service.NavigationServices;
import org.guvnor.common.services.shared.config.AppConfigService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.client.navigation.NavTreeDefinitions;
import org.kie.workbench.client.resources.i18n.NavigationConstants;
import org.kie.workbench.common.workbench.client.admin.DefaultAdminPageHelper;
import org.kie.workbench.common.workbench.client.authz.PermissionTreeSetup;
import org.kie.workbench.common.workbench.client.menu.DefaultWorkbenchFeaturesMenusHelper;
import org.mockito.Mock;
import org.uberfire.client.mvp.ActivityBeansCache;
import org.uberfire.client.workbench.Workbench;
import org.uberfire.client.workbench.widgets.menu.megamenu.WorkbenchMegaMenuPresenter;
import org.uberfire.ext.security.management.client.ClientUserSystemManager;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.mvp.Command;
import org.uberfire.workbench.model.menu.MenuFactory;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code KieWorkbenchEntryPoint}: verifies workbench bootstrap,
 * menu construction, and the structure of the default navigation tree.
 */
@RunWith(GwtMockitoTestRunner.class)
public class KieWorkbenchEntryPointTest {

    @Mock
    private AppConfigService appConfigService;

    @Mock
    private ActivityBeansCache activityBeansCache;

    @Mock
    private DefaultWorkbenchFeaturesMenusHelper menusHelper;

    @Mock
    protected ClientUserSystemManager userSystemManager;

    @Mock
    protected WorkbenchMegaMenuPresenter menuBar;

    @Mock
    protected Workbench workbench;

    @Mock
    protected PermissionTreeSetup permissionTreeSetup;

    @Mock
    private DefaultAdminPageHelper adminPageHelper;

    private NavTreeDefinitions navTreeDefinitions;

    // Real implementation (not a mock) so the default nav tree built by the
    // entry point can be inspected in defaultNavTreeTest().
    private NavigationManager navigationManager;

    @Mock
    protected NavigationExplorerScreen navigationExplorerScreen;

    @Mock
    protected NavigationConstants navigationConstants;

    @Mock
    protected NavigationServices navigationServices;

    @Mock
    protected NavTreeEditor navTreeEditor;

    @Mock
    protected EventSourceMock<NavTreeLoadedEvent> navTreeLoadedEvent;

    private KieWorkbenchEntryPoint kieWorkbenchEntryPoint;

    @Before
    public void setup() {
        navTreeDefinitions = new NavTreeDefinitions();
        navigationManager = new NavigationManagerImpl(new CallerMock<>(navigationServices),
                                                      null,
                                                      navTreeLoadedEvent,
                                                      null,
                                                      null);
        // Execute the initialization callback synchronously so the entry point
        // does not wait for the (mocked) user system manager.
        doAnswer(invocationOnMock -> {
            ((Command) invocationOnMock.getArguments()[0]).execute();
            return null;
        }).when(userSystemManager).waitForInitialization(any(Command.class));
        doReturn(mock(MenuFactory.TopLevelMenusBuilder.class)).when(menusHelper).buildMenusFromNavTree(any());
        CallerMock<AppConfigService> appConfigServiceCallerMock = new CallerMock<>(appConfigService);
        kieWorkbenchEntryPoint = spy(new KieWorkbenchEntryPoint(appConfigServiceCallerMock,
                                                                activityBeansCache,
                                                                menusHelper,
                                                                userSystemManager,
                                                                menuBar,
                                                                workbench,
                                                                permissionTreeSetup,
                                                                adminPageHelper,
                                                                navTreeDefinitions,
                                                                navigationManager,
                                                                navigationExplorerScreen));
        // hideLoadingPopup touches the DOM; stub it out for unit testing.
        doNothing().when(kieWorkbenchEntryPoint).hideLoadingPopup();
        when(navigationExplorerScreen.getNavTreeEditor()).thenReturn(navTreeEditor);
    }

    @Test
    public void initTest() {
        kieWorkbenchEntryPoint.init();

        verify(workbench).addStartupBlocker(KieWorkbenchEntryPoint.class);
        verify(navTreeEditor).setMaxLevels(NavTreeDefinitions.GROUP_WORKBENCH,
                                           2);
    }

    @Test
    public void testInitializeWorkbench() {
        kieWorkbenchEntryPoint.initializeWorkbench();

        verify(permissionTreeSetup).configureTree();
    }

    @Test
    public void setupMenuTest() {
        kieWorkbenchEntryPoint.setupMenu();

        verify(menuBar).addMenus(any());
        verify(menusHelper).addUtilitiesMenuItems();
        verify(workbench).removeStartupBlocker(KieWorkbenchEntryPoint.class);
    }

    @Test
    public void defaultNavTreeTest() {
        kieWorkbenchEntryPoint.setupMenu();

        NavTree navTree = navigationManager.getNavTree();
        // Named workbenchGroup so it does not shadow the @Mock "workbench" field.
        NavGroup workbenchGroup = (NavGroup) navTree.getItemById(NavTreeDefinitions.GROUP_WORKBENCH);
        NavGroup design = (NavGroup) navTree.getItemById(NavTreeDefinitions.GROUP_DESIGN);
        NavItem pages = navTree.getItemById(NavTreeDefinitions.ENTRY_PAGES);
        NavGroup deploy = (NavGroup) navTree.getItemById(NavTreeDefinitions.GROUP_DEPLOY);
        NavItem execServers = navTree.getItemById(NavTreeDefinitions.ENTRY_EXECUTION_SERVERS);
        NavGroup manage = (NavGroup) navTree.getItemById(NavTreeDefinitions.GROUP_MANAGE);
        NavItem processDef = navTree.getItemById(NavTreeDefinitions.ENTRY_PROCESS_DEFINITIONS);
        NavItem processInst = navTree.getItemById(NavTreeDefinitions.ENTRY_PROCESS_INSTANCES);
        NavItem taskAdmin = navTree.getItemById(NavTreeDefinitions.ENTRY_ADMINISTRATION_TASKS);
        NavItem jobs = navTree.getItemById(NavTreeDefinitions.ENTRY_JOBS);
        NavItem executionErrors = navTree.getItemById(NavTreeDefinitions.ENTRY_EXECUTION_ERRORS);
        NavGroup track = (NavGroup) navTree.getItemById(NavTreeDefinitions.GROUP_TRACK);
        NavItem tasks = navTree.getItemById(NavTreeDefinitions.ENTRY_TASKS_LIST);
        NavItem processDashboard = navTree.getItemById(NavTreeDefinitions.ENTRY_PROCESS_DASHBOARD);
        NavItem taskDashboard = navTree.getItemById(NavTreeDefinitions.ENTRY_TASK_DASHBOARD);

        // Top-level groups exist and hang off the workbench group.
        assertNotNull(workbenchGroup);
        assertNotNull(design);
        assertNotNull(deploy);
        assertNotNull(manage);
        assertNotNull(track);
        // JUnit convention: assertEquals(expected, actual).
        assertEquals(workbenchGroup,
                     design.getParent());
        assertEquals(workbenchGroup,
                     deploy.getParent());
        assertEquals(workbenchGroup,
                     manage.getParent());
        assertEquals(workbenchGroup,
                     track.getParent());

        // Design group contents.
        assertNotNull(pages);
        assertEquals(design,
                     pages.getParent());

        // Deploy group contents.
        assertNotNull(execServers);
        assertEquals(deploy,
                     execServers.getParent());

        // Manage group contents.
        assertNotNull(processDef);
        assertNotNull(processInst);
        assertNotNull(taskAdmin);
        assertNotNull(jobs);
        assertNotNull(executionErrors);
        assertEquals(manage,
                     processDef.getParent());
        assertEquals(manage,
                     processInst.getParent());
        assertEquals(manage,
                     taskAdmin.getParent());
        assertEquals(manage,
                     jobs.getParent());
        assertEquals(manage,
                     executionErrors.getParent());

        // Track group contents.
        assertNotNull(tasks);
        assertNotNull(processDashboard);
        assertNotNull(taskDashboard);
        assertEquals(track,
                     tasks.getParent());
        assertEquals(track,
                     processDashboard.getParent());
        assertEquals(track,
                     taskDashboard.getParent());

        // Built-in entries must not be user-modifiable.
        assertFalse(design.isModifiable());
        assertFalse(pages.isModifiable());
    }
}
| apache-2.0 |
googleads/google-ads-java | google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/common/UserListNumberRuleItemInfo.java | 25298 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v9/common/user_lists.proto
package com.google.ads.googleads.v9.common;
/**
* <pre>
* A rule item composed of a number operation.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.common.UserListNumberRuleItemInfo}
*/
public final class UserListNumberRuleItemInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v9.common.UserListNumberRuleItemInfo)
UserListNumberRuleItemInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserListNumberRuleItemInfo.newBuilder() to construct.
private UserListNumberRuleItemInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserListNumberRuleItemInfo() {
operator_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new UserListNumberRuleItemInfo();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private UserListNumberRuleItemInfo(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8: {
int rawValue = input.readEnum();
operator_ = rawValue;
break;
}
case 25: {
bitField0_ |= 0x00000001;
value_ = input.readDouble();
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.common.UserListsProto.internal_static_google_ads_googleads_v9_common_UserListNumberRuleItemInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.common.UserListsProto.internal_static_google_ads_googleads_v9_common_UserListNumberRuleItemInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.class, com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.Builder.class);
}
private int bitField0_;
public static final int OPERATOR_FIELD_NUMBER = 1;
private int operator_;
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @return The enum numeric value on the wire for operator.
*/
@java.lang.Override public int getOperatorValue() {
return operator_;
}
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @return The operator.
*/
@java.lang.Override public com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator getOperator() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator result = com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.valueOf(operator_);
return result == null ? com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.UNRECOGNIZED : result;
}
public static final int VALUE_FIELD_NUMBER = 3;
private double value_;
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @return Whether the value field is set.
*/
@java.lang.Override
public boolean hasValue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @return The value.
*/
@java.lang.Override
public double getValue() {
return value_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (operator_ != com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.UNSPECIFIED.getNumber()) {
output.writeEnum(1, operator_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeDouble(3, value_);
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (operator_ != com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, operator_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeDoubleSize(3, value_);
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo)) {
return super.equals(obj);
}
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo other = (com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo) obj;
if (operator_ != other.operator_) return false;
if (hasValue() != other.hasValue()) return false;
if (hasValue()) {
if (java.lang.Double.doubleToLongBits(getValue())
!= java.lang.Double.doubleToLongBits(
other.getValue())) return false;
}
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + OPERATOR_FIELD_NUMBER;
hash = (53 * hash) + operator_;
if (hasValue()) {
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
java.lang.Double.doubleToLongBits(getValue()));
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A rule item composed of a number operation.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v9.common.UserListNumberRuleItemInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v9.common.UserListNumberRuleItemInfo)
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v9.common.UserListsProto.internal_static_google_ads_googleads_v9_common_UserListNumberRuleItemInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v9.common.UserListsProto.internal_static_google_ads_googleads_v9_common_UserListNumberRuleItemInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.class, com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.Builder.class);
}
// Construct using com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
operator_ = 0;
value_ = 0D;
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v9.common.UserListsProto.internal_static_google_ads_googleads_v9_common_UserListNumberRuleItemInfo_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo getDefaultInstanceForType() {
return com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo build() {
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo buildPartial() {
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo result = new com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
result.operator_ = operator_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.value_ = value_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo) {
return mergeFrom((com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo other) {
if (other == com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo.getDefaultInstance()) return this;
if (other.operator_ != 0) {
setOperatorValue(other.getOperatorValue());
}
if (other.hasValue()) {
setValue(other.getValue());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int operator_ = 0;
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @return The enum numeric value on the wire for operator.
*/
@java.lang.Override public int getOperatorValue() {
return operator_;
}
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @param value The enum numeric value on the wire for operator to set.
* @return This builder for chaining.
*/
public Builder setOperatorValue(int value) {
operator_ = value;
onChanged();
return this;
}
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @return The operator.
*/
@java.lang.Override
public com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator getOperator() {
@SuppressWarnings("deprecation")
com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator result = com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.valueOf(operator_);
return result == null ? com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator.UNRECOGNIZED : result;
}
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @param value The operator to set.
* @return This builder for chaining.
*/
public Builder setOperator(com.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator value) {
if (value == null) {
throw new NullPointerException();
}
operator_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* Number comparison operator.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>.google.ads.googleads.v9.enums.UserListNumberRuleItemOperatorEnum.UserListNumberRuleItemOperator operator = 1;</code>
* @return This builder for chaining.
*/
public Builder clearOperator() {
operator_ = 0;
onChanged();
return this;
}
private double value_ ;
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @return Whether the value field is set.
*/
@java.lang.Override
public boolean hasValue() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @return The value.
*/
@java.lang.Override
public double getValue() {
return value_;
}
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @param value The value to set.
* @return This builder for chaining.
*/
public Builder setValue(double value) {
bitField0_ |= 0x00000001;
value_ = value;
onChanged();
return this;
}
/**
* <pre>
* Number value to be compared with the variable.
* This field is required and must be populated when creating a new number
* rule item.
* </pre>
*
* <code>optional double value = 3;</code>
* @return This builder for chaining.
*/
public Builder clearValue() {
bitField0_ = (bitField0_ & ~0x00000001);
value_ = 0D;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v9.common.UserListNumberRuleItemInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v9.common.UserListNumberRuleItemInfo)
private static final com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo();
}
public static com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UserListNumberRuleItemInfo>
PARSER = new com.google.protobuf.AbstractParser<UserListNumberRuleItemInfo>() {
@java.lang.Override
public UserListNumberRuleItemInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new UserListNumberRuleItemInfo(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<UserListNumberRuleItemInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserListNumberRuleItemInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v9.common.UserListNumberRuleItemInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| apache-2.0 |
hugo-andrade/bopepo-fork-litio | bobepo-fork-litio/src/br/com/nordestefomento/jrimum/bopepo/campolivre/guia/CampoLivreUtil.java | 2785 | /**
*
*/
package br.com.nordestefomento.jrimum.bopepo.campolivre.guia;
import org.apache.commons.lang.StringUtils;
import br.com.nordestefomento.jrimum.domkee.financeiro.banco.febraban.guia.TipoSeguimento;
/**
 * Utility helpers that validate the generated free field ("campo livre") of a
 * payment slip against the expected length for each {@link TipoSeguimento}.
 *
 * @author misael
 *
 * @since 0.3
 *
 * @version 0.3
 */
public class CampoLivreUtil {

    /**
     * Expected length of the free field for the given segment type.
     *
     * @return 21 for {@code CARNES_E_ASSEMELHADOS_OU_DEMAIS}, 25 for all others
     */
    public static Integer getTamanhoCorreto(TipoSeguimento tipoSeguimento) {
        return (tipoSeguimento == TipoSeguimento.CARNES_E_ASSEMELHADOS_OU_DEMAIS) ? 21 : 25;
    }

    /** Whether the written free field has exactly the expected length. */
    public static boolean tamanhoEstaCorreto(CampoLivre campoLivre, TipoSeguimento tipoSeguimento) {
        return tamanhoEstaCorreto(campoLivre.write(), tipoSeguimento);
    }

    /** Whether the given free-field string has exactly the expected length. */
    public static boolean tamanhoEstaCorreto(String campoLivreStr, TipoSeguimento tipoSeguimento) {
        return campoLivreStr.length() == getTamanhoCorreto(tipoSeguimento);
    }

    /** Whether the written free field contains at least one blank character. */
    public static boolean existeEspacoEmBranco(CampoLivre campoLivre, TipoSeguimento tipoSeguimento) {
        return existeEspacoEmBranco(campoLivre.write(), tipoSeguimento);
    }

    // NOTE(review): tipoSeguimento is unused here; kept for API compatibility.
    public static boolean existeEspacoEmBranco(String campoLivreStr, TipoSeguimento tipoSeguimento) {
        return campoLivreStr.indexOf(' ') > -1;
    }

    /**
     * True only when the free field, with blanks removed, still has the
     * expected length — i.e. it has the right size AND contains no blanks.
     * Note this is deliberately NOT the plain negation of
     * {@link #existeEspacoEmBranco(String, TipoSeguimento)}.
     */
    public static boolean naoExisteEspacoEmBranco(CampoLivre campoLivre, TipoSeguimento tipoSeguimento) {
        return naoExisteEspacoEmBranco(campoLivre.write(), tipoSeguimento);
    }

    public static boolean naoExisteEspacoEmBranco(String campoLivreStr, TipoSeguimento tipoSeguimento) {
        return (StringUtils.remove(campoLivreStr, ' ').length() == getTamanhoCorreto(tipoSeguimento));
    }

    /**
     * Validates the generated free field, throwing a {@link CampoLivreException}
     * whose message describes every problem found (wrong length and/or blanks).
     *
     * @throws CampoLivreException if the field has the wrong length or contains blanks
     */
    public static void validar(CampoLivre campoLivre, TipoSeguimento tipoSeguimento) throws CampoLivreException {
        // Write the field once; the original called campoLivre.write() three
        // times, which may be costly and assumes write() is idempotent.
        String campoLivreStr = campoLivre.write();
        int tamanhoAtual = campoLivreStr.length();
        int tamanhoEsperado = getTamanhoCorreto(tipoSeguimento);
        StringBuilder msgErro = new StringBuilder();
        if (!tamanhoEstaCorreto(campoLivreStr, tipoSeguimento)) {
            if (tamanhoAtual > tamanhoEsperado) {
                msgErro.append("O tamanho do campo livre gerado [" + tamanhoAtual + "] é maior que o esperado [" + tamanhoEsperado + "] para o segmento \"" + tipoSeguimento.getCodigo() + "-" + tipoSeguimento.getDescricao() + "\".");
            } else {
                msgErro.append("O tamanho do campo livre gerado [" + tamanhoAtual + "] é menor que o esperado [" + tamanhoEsperado + "] para o segmento \"" + tipoSeguimento.getCodigo() + "-" + tipoSeguimento.getDescricao() + "\".");
            }
        }
        if (existeEspacoEmBranco(campoLivreStr, tipoSeguimento)) {
            msgErro.append("O campo livre possui espaços em branco, e isto não pode ocorrer.");
        }
        if (msgErro.length() > 0) {
            throw new CampoLivreException(msgErro.toString());
        }
    }
}
| apache-2.0 |
crate/crate | server/src/main/java/io/crate/execution/jobs/DistResultRXTask.java | 4076 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.execution.jobs;
import io.crate.breaker.RamAccounting;
import io.crate.exceptions.Exceptions;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.concurrent.CompletableFuture;
/**
* A {@link DownstreamRXTask} which receives paged buckets from upstreams
* and forwards the merged bucket results to the consumers for further processing.
*/
/**
 * A {@link DownstreamRXTask} which receives paged buckets from upstreams
 * and forwards the merged bucket results to the consumers for further processing.
 */
public class DistResultRXTask implements Task, DownstreamRXTask {

    private final int taskId;
    private final String taskName;
    private final int expectedBuckets;
    private final PageBucketReceiver bucketReceiver;
    private final CompletableFuture<Void> doneFuture;
    private final RamAccounting ramAccounting;

    // Stays -1 while the task is running; frozen to the final byte count
    // once the bucket receiver completes (successfully or exceptionally).
    private long completedBytesUsed = -1;

    /**
     * @param id                 id of this task
     * @param name               human readable task name
     * @param pageBucketReceiver receives and merges the paged buckets
     * @param ramAccounting      tracks memory used while buckets are buffered
     * @param numBuckets         number of upstream buckets expected; 0 means no upstreams
     */
    public DistResultRXTask(int id,
                            String name,
                            PageBucketReceiver pageBucketReceiver,
                            RamAccounting ramAccounting,
                            int numBuckets) {
        this.taskId = id;
        this.taskName = name;
        this.expectedBuckets = numBuckets;
        this.bucketReceiver = pageBucketReceiver;
        this.ramAccounting = ramAccounting;
        this.doneFuture = pageBucketReceiver.completionFuture().handle((ignored, ex) -> {
            // Capture the final memory footprint before accounting goes stale.
            completedBytesUsed = ramAccounting.totalBytes();
            if (ex instanceof IllegalStateException) {
                kill(ex);
            }
            if (ex != null) {
                throw Exceptions.toRuntimeException(ex);
            }
            return null;
        });
    }

    @Override
    public void kill(@Nonnull Throwable t) {
        bucketReceiver.kill(t);
    }

    @Override
    public void start() {
        // E.g. if the upstreamPhase is a collectPhase for a partitioned table
        // without any partitions there won't be any executionNodes for that
        // collectPhase -> no upstreams -> just finish immediately.
        if (expectedBuckets == 0) {
            bucketReceiver.consumeRows();
        }
    }

    @Override
    public String name() {
        return taskName;
    }

    @Override
    public int id() {
        return taskId;
    }

    @Override
    public String toString() {
        return "DistResultRXTask{"
               + "id=" + id()
               + ", numBuckets=" + expectedBuckets
               + ", isDone=" + doneFuture.isDone()
               + '}';
    }

    /**
     * The default behavior is to receive all upstream buckets,
     * regardless of the input id. For a {@link DownstreamRXTask}
     * which uses the inputId, see {@link JoinTask}.
     */
    @Nullable
    @Override
    public PageBucketReceiver getBucketReceiver(byte inputId) {
        return bucketReceiver;
    }

    @Override
    public CompletableFuture<Void> completionFuture() {
        return doneFuture;
    }

    @Override
    public long bytesUsed() {
        long bytes = completedBytesUsed;
        // While still running, report the live accounting value.
        return bytes == -1 ? ramAccounting.totalBytes() : bytes;
    }
}
| apache-2.0 |
hazendaz/assertj-core | src/test/java/org/assertj/core/api/byte_/ByteAssert_isCloseToPercentage_byte_Test.java | 1366 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.api.byte_;
import org.assertj.core.api.ByteAssert;
import org.assertj.core.api.ByteAssertBaseTest;
import org.assertj.core.data.Percentage;
import static org.assertj.core.data.Percentage.withPercentage;
import static org.mockito.Mockito.verify;
// Verifies that ByteAssert#isCloseTo(Byte, Percentage) delegates to the
// internal Bytes#assertIsCloseToPercentage with the assertion's info,
// actual value, expected value and percentage.
class ByteAssert_isCloseToPercentage_byte_Test extends ByteAssertBaseTest {

  // 5% tolerance used for the closeness check.
  private final Percentage percentage = withPercentage((byte) 5);
  // Expected value the actual byte is compared against.
  private final Byte value = 10;

  @Override
  protected ByteAssert invoke_api_method() {
    // Exercise the public API; the base test captures the proxied internals.
    return assertions.isCloseTo(value, percentage);
  }

  @Override
  protected void verify_internal_effects() {
    // The call must be forwarded verbatim to the mocked Bytes instance.
    verify(bytes).assertIsCloseToPercentage(getInfo(assertions), getActual(assertions), value, percentage);
  }
}
| apache-2.0 |
yslin/android-zodlin | examples/ActionBar/app/src/main/java/info/androidhive/actionbar/MainActivity.java | 21769 | package info.androidhive.actionbar;
import info.androidhive.CollectionDemoActivity;
import info.androidhive.actionbar.model.SpinnerNavItem;
import info.androidhive.info.actionbar.adapter.TitleNavigationAdapter;
import java.lang.reflect.Field;
import java.util.ArrayList;
import android.app.ActionBar;
import android.app.FragmentTransaction;
import android.app.SearchManager;
import android.app.SearchableInfo;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.os.AsyncTask;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.ImageView;
import android.widget.SearchView;
import android.widget.SimpleCursorAdapter;
import android.widget.TextView;
// Demo activity showing the three ActionBar navigation modes (standard,
// list/spinner, tabs backed by a ViewPager) plus an ActionBar SearchView
// with SQLite-backed suggestions and a refresh item with a progress view.
public class MainActivity extends FragmentActivity implements
        ActionBar.OnNavigationListener, ActionBar.TabListener {

    private static final String TAG = "MainActivity";

    // action bar
    private ActionBar mActionBar;

    // Title navigation Spinner data
    private ArrayList<SpinnerNavItem> mNavSpinner;

    // Navigation adapter
    private TitleNavigationAdapter mTitleNavigationAdapter;

    // Menu search
    private SearchView mSearchView;
    private AutoCompleteTextView mAutoCompleteTextView;
    private SimpleCursorAdapter mSimpleCursorAdapter;
    private ArrayAdapter<String> mArrayAdapter;

    // Refresh menu item; kept so SyncData can swap its action view for a progress bar.
    private MenuItem mRefreshMenuItem;

    /**
     * The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the
     * three primary sections of the app. We use a {@link android.support.v4.app.FragmentPagerAdapter}
     * derivative, which will keep every loaded fragment in memory. If this becomes too memory
     * intensive, it may be best to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}.
     */
    AppSectionsPagerAdapter mAppSectionsPagerAdapter;

    /**
     * The {@link ViewPager} that will display the three primary sections of the app, one at a
     * time. Show action bar navigation mode with tab.
     */
    ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mActionBar = getActionBar();

        // Hide the action bar title
        mActionBar.setDisplayShowTitleEnabled(false);

        // Spinner Navigation / Tab Navigation
        setupListNavigation();
        setupTabNavigation();

        // Changing the action bar icon
        // mActionBar.setIcon(R.drawable.ico_actionbar);
    }

    @Override
    protected void onStart() {
        super.onStart();
        // The navigation mode is user-configurable; it is stored as a string
        // preference ("0"/"1"/"2") and parsed into the ActionBar constant.
        SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(this);
        String navigationMode = sharedPreferences.getString(getString(R.string.config_navigation_key), "0");
        setupNavigation(Integer.valueOf(navigationMode));
    }

    // Builds the spinner (list) navigation items and registers this activity
    // as the selection callback.
    private void setupListNavigation() {
        // Spinner title navigation data
        mNavSpinner = new ArrayList<SpinnerNavItem>();
        mNavSpinner.add(new SpinnerNavItem("Local", R.drawable.ic_location));
        mNavSpinner.add(new SpinnerNavItem("My Places", R.drawable.ic_my_places));
        mNavSpinner.add(new SpinnerNavItem("Checkins", R.drawable.ic_checkin));
        mNavSpinner.add(new SpinnerNavItem("Latitude", R.drawable.ic_latitude));

        // title drop down adapter
        mTitleNavigationAdapter = new TitleNavigationAdapter(getApplicationContext(),
                mNavSpinner);

        // assigning the spinner navigation
        mActionBar.setListNavigationCallbacks(mTitleNavigationAdapter, this);
    }

    // Wires the ViewPager to the pager adapter and creates one ActionBar tab
    // per page, keeping tab selection and page swipes in sync.
    private void setupTabNavigation() {
        // mActionBar.addTab(mActionBar.newTab().setText("Tab 1").setTabListener(this));
        // mActionBar.addTab(mActionBar.newTab().setText("Tab 2").setTabListener(this));
        // mActionBar.addTab(mActionBar.newTab().setText("Tab 3").setTabListener(this));

        // Create the adapter that will return a fragment for each of the three primary sections
        // of the app.
        mAppSectionsPagerAdapter = new AppSectionsPagerAdapter(getSupportFragmentManager());

        // Set up the ViewPager, attaching the adapter and setting up a listener for when the
        // user swipes between sections.
        mViewPager = (ViewPager) findViewById(R.id.pager);
        mViewPager.setAdapter(mAppSectionsPagerAdapter);
        mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                // When swiping between different app sections, select the corresponding tab.
                // We can also use ActionBar.Tab#select() to do this if we have a reference to the
                // Tab.
                mActionBar.setSelectedNavigationItem(position);
            }
        });

        // For each of the sections in the app, add a tab to the action bar.
        for (int i = 0; i < mAppSectionsPagerAdapter.getCount(); i++) {
            // Create a tab with text corresponding to the page title defined by the adapter.
            // Also specify this Activity object, which implements the TabListener interface, as the
            // listener for when this tab is selected.
            int resId = android.R.drawable.ic_menu_agenda;
            switch (i) {
            case 0:
                resId = android.R.drawable.ic_menu_search;
                break;
            case 1:
                resId = android.R.drawable.ic_menu_call;
                break;
            case 2:
                resId = android.R.drawable.ic_menu_crop;
                break;
            }
            mActionBar.addTab(mActionBar.newTab()
                    .setIcon(resId)
                    .setText(mAppSectionsPagerAdapter.getPageTitle(i))
                    .setTabListener(this));
        }
    }

    // Applies one of the three ActionBar navigation modes chosen in settings.
    private void setupNavigation(int mode) {
        switch (mode) {
        case ActionBar.NAVIGATION_MODE_STANDARD:
            mActionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
            break;
        case ActionBar.NAVIGATION_MODE_LIST:
            // Enabling Spinner dropdown navigation
            mActionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST);
            break;
        case ActionBar.NAVIGATION_MODE_TABS:
            // Enabling Tab navigation
            mActionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
            break;
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.activity_main_actions, menu);
        MenuItem searchItem = menu.findItem(R.id.action_search);
        setupSearchView(searchItem);
        return super.onCreateOptionsMenu(menu);
    }

    // Whether the SearchView should always be shown expanded (overridable hook).
    protected boolean isAlwaysExpanded() {
        return false;
    }

    // http://nlopez.io/how-to-style-the-actionbar-searchview-programmatically/
    // Configures the SearchView's appearance, searchable info, and suggestion
    // adapters. NOTE(review): the reflection below depends on internal field
    // names ("mSubmitButton", "mQueryTextView") of SearchView and may break on
    // other platform versions — confirm against the targeted API levels.
    private void setupSearchView(MenuItem searchItem) {
        mSearchView = (SearchView) searchItem.getActionView();

        // 1. Config search view appearance
        mSearchView.setQueryHint(getString(R.string.search_hint));
        // When set to true, an arrow (submit) button appears on the right.
        // If the user has typed nothing, the submit event is not triggered.
        mSearchView.setSubmitButtonEnabled(true);
        // Modify default style via java reflection, reference android/search_view.xml
        try {
            Field field = mSearchView.getClass().getDeclaredField("mSubmitButton");
            field.setAccessible(true);
            ImageView iv = (ImageView) field.get(mSearchView);
            iv.setImageDrawable(this.getResources().getDrawable(android.R.drawable.ic_btn_speak_now));
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (isAlwaysExpanded()) {
            // Controls whether the search box automatically collapses to an icon by default.
            mSearchView.setIconifiedByDefault(false);
        } else {
            // Start in the expanded state. With this call the SearchView is
            // initially expanded, i.e. ready for the user to tap and type.
            // Without it, the user must first tap the magnifier icon to
            // expand the view and reveal the input field.
            mSearchView.onActionViewExpanded();
            searchItem.setShowAsActionFlags(MenuItem.SHOW_AS_ACTION_IF_ROOM
                    | MenuItem.SHOW_AS_ACTION_COLLAPSE_ACTION_VIEW);
        }

        // 2. Show application matching search text
        SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
        if (searchManager != null) {
            // Associate searchable configuration with the SearchView
            SearchableInfo info = searchManager.getSearchableInfo(getComponentName());
            //// List<SearchableInfo> searchables = searchManager
            //// .getSearchablesInGlobalSearch();
            //// for (SearchableInfo inf : searchables) {
            //// if (inf.getSuggestAuthority() != null
            //// && inf.getSuggestAuthority().startsWith("applications")) {
            //// info = inf;
            //// }
            //// }
            mSearchView.setSearchableInfo(info);
        }

        // 3. Attach two suggestion sources: a cursor-backed adapter on the
        // SearchView itself and an array adapter on its inner text view.
        Cursor cursor = getTestCursor();
        mSimpleCursorAdapter = new SimpleCursorAdapter(this,
                R.layout.mytextview, cursor, new String[] { "tb_name" },
                new int[] { R.id.textview });
        String[] item = new String[] {"test", "target", "total", "title", "turtle"};
        mArrayAdapter = new ArrayAdapter<String>(this,
                android.R.layout.simple_expandable_list_item_1, item);
        mSearchView.setSuggestionsAdapter(mSimpleCursorAdapter);
        try {
            Field field = mSearchView.getClass().getDeclaredField("mQueryTextView");
            field.setAccessible(true);
            mAutoCompleteTextView = (AutoCompleteTextView) field.get(mSearchView);
            mAutoCompleteTextView.setAdapter(mArrayAdapter);
            mAutoCompleteTextView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
                @Override
                public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                    mAutoCompleteTextView.setText(mArrayAdapter.getItem(position).toString());
                }
            });
        } catch (Exception e) {
            e.printStackTrace();
        }

        // 4. Get query text
        mSearchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                Log.d(TAG, "onQueryTextSubmit: " + query);
                return false;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                Log.d(TAG, "onQueryTextChange: " + newText);
                return false;
            }
        });
    }

    // Seeds and returns the data needed for the search suggestions.
    // On first run the table does not exist: the insert throws, and the catch
    // block creates the table and retries. NOTE(review): the SQLiteDatabase is
    // never closed and the Cursor is returned open by design (it backs the
    // suggestion adapter) — confirm lifecycle expectations.
    public Cursor getTestCursor() {
        SQLiteDatabase db = SQLiteDatabase.openOrCreateDatabase(
                this.getFilesDir() + "/search_suggestion.db3", null);
        Cursor cursor = null;
        try {
            String insertSql = "insert into tb_test values (null,?,?)";
            db.execSQL(insertSql, new Object[] { "test", 1 });
            db.execSQL(insertSql, new Object[] { "target", 2 });
            db.execSQL(insertSql, new Object[] { "total", 3 });
            db.execSQL(insertSql, new Object[] { "title", 4 });
            db.execSQL(insertSql, new Object[] { "turtle", 5 });
            String querySql = "select * from tb_test";
            cursor = db.rawQuery(querySql, null);
        } catch (Exception e) {
            // Table missing on first run: create it, then seed and query again.
            String sql = "create table tb_test (_id integer primary key autoincrement,tb_name varchar(20),tb_age integer)";
            db.execSQL(sql);
            String insertSql = "insert into tb_test values (null,?,?)";
            db.execSQL(insertSql, new Object[] { "test", 1 });
            db.execSQL(insertSql, new Object[] { "target", 2 });
            db.execSQL(insertSql, new Object[] { "total", 3 });
            db.execSQL(insertSql, new Object[] { "title", 4 });
            db.execSQL(insertSql, new Object[] { "turtle", 5 });
            String querySql = "select * from tb_test";
            cursor = db.rawQuery(querySql, null);
        }
        return cursor;
    }

    /**
     * On selecting action bar icons
     * */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Take appropriate action for each action item click
        switch (item.getItemId()) {
        case R.id.action_search:
            // search action
            return true;
        case R.id.action_location_found:
            // location found
            LocationFound();
            return true;
        case R.id.action_refresh:
            // refresh
            mRefreshMenuItem = item;
            // load the data from server
            new SyncData().execute();
            return true;
        case R.id.action_help:
            // help action
            return true;
        case R.id.action_check_updates:
            // check for updates action
            return true;
        case R.id.action_config:
            // config settings
            Intent intent = new Intent(this, ConfigActivity.class);
            startActivity(intent);
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    /**
     * Launching new activity
     * */
    private void LocationFound() {
        Intent i = new Intent(MainActivity.this, LocationFound.class);
        startActivity(i);
    }

    /*
     * Actionbar navigation list item select listener
     * Implemented from ActionBar.OnNavigationListener
     */
    @Override
    public boolean onNavigationItemSelected(int itemPosition, long itemId) {
        // Action to be taken after selecting a spinner item
        Log.d(TAG, "onNavigationItemSelected: (itemPosition,itemId) = (" + itemPosition
                + "," + itemId + ")");
        // Change ViewPager's Fragment
        mViewPager.setCurrentItem(itemPosition);
        return false;
    }

    // Implemented from ActionBar.TabListener
    @Override
    public void onTabSelected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // This is called when a tab is selected.
        // When the given tab is selected, switch to the corresponding page in the ViewPager.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    // Implemented from ActionBar.TabListener
    @Override
    public void onTabUnselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // This is called when a previously selected tab is unselected.
    }

    // Implemented from ActionBar.TabListener
    @Override
    public void onTabReselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // This is called when a previously selected tab is selected again.
    }

    /**
     * Async task to load the data from server.
     * Shows a progress bar in place of the refresh menu item while running.
     * **/
    private class SyncData extends AsyncTask<String, Void, String> {

        @Override
        protected void onPreExecute() {
            // set the progress bar view
            mRefreshMenuItem.setActionView(R.layout.action_progressbar);
            mRefreshMenuItem.expandActionView();
        }

        @Override
        protected String doInBackground(String... params) {
            // not making real request in this demo
            // for now we use a timer to wait for sometime
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        protected void onPostExecute(String result) {
            mRefreshMenuItem.collapseActionView();
            // remove the progress bar view
            mRefreshMenuItem.setActionView(null);
        }
    };

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to one of the primary
     * sections of the app.
     */
    public static class AppSectionsPagerAdapter extends FragmentPagerAdapter {

        public AppSectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int i) {
            switch (i) {
            case 0:
                // The first section of the app is the most interesting -- it offers
                // a launchpad into the other demonstrations in this example application.
                return new LaunchpadSectionFragment();

            default:
                // The other sections of the app are dummy placeholders.
                Fragment fragment = new DummySectionFragment();
                Bundle args = new Bundle();
                args.putInt(DummySectionFragment.ARG_SECTION_NUMBER, i + 1);
                fragment.setArguments(args);
                return fragment;
            }
        }

        @Override
        public int getCount() {
            return 3;
        }

        @Override
        public CharSequence getPageTitle(int position) {
            switch (position) {
            case 0:
                return "Launch";
            case 1:
            case 2:
                // Cases 1 and 2 intentionally fall through to the same label format.
                return "Dummy" + (position + 1);
            }
            return "Section " + (position + 1);
        }
    }

    /**
     * A fragment that launches other parts of the demo application.
     */
    public static class LaunchpadSectionFragment extends Fragment {

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_section_launchpad, container, false);

            // Demonstration of a collection-browsing activity.
            rootView.findViewById(R.id.demo_collection_button)
                    .setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            Intent intent = new Intent(getActivity(), CollectionDemoActivity.class);
                            startActivity(intent);
                        }
                    });

            // Demonstration of navigating to external activities.
            rootView.findViewById(R.id.demo_external_activity)
                    .setOnClickListener(new View.OnClickListener() {
                        @Override
                        public void onClick(View view) {
                            // Create an intent that asks the user to pick a photo, but using
                            // FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET, ensures that relaunching
                            // the application from the device home screen does not return
                            // to the external activity.
                            Intent supportActivityIntent = new Intent(getActivity(),
                                    info.androidhive.actionbar.support.MainActivity.class);
                            supportActivityIntent.addFlags(
                                    Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
                            startActivity(supportActivityIntent);
                        }
                    });

            return rootView;
        }
    }

    /**
     * A dummy fragment representing a section of the app, but that simply displays dummy text.
     */
    public static class DummySectionFragment extends Fragment {

        public static final String ARG_SECTION_NUMBER = "section_number";

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_section_dummy, container, false);
            Bundle args = getArguments();
            ((TextView) rootView.findViewById(android.R.id.text1)).setText(
                    getString(R.string.dummy_section_text, args.getInt(ARG_SECTION_NUMBER)));
            return rootView;
        }
    }
}
| apache-2.0 |
uber/kafka-rest | src/main/java/io/confluent/kafkarest/SpoolThread.java | 17566 | /**
* Copyright 2015 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package io.confluent.kafkarest;
import org.apache.flume.Context;
import org.apache.flume.Transaction;
import org.apache.flume.channel.file.FileChannel;
import org.apache.flume.channel.file.FileChannelConfiguration;
import org.apache.flume.conf.Configurables;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.errors.RetriableException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.util.ArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import io.confluent.kafkarest.entities.SpoolChannel;
import io.confluent.kafkarest.entities.SpoolMessage;
import io.confluent.kafkarest.entities.SpoolShard;
import com.codahale.metrics.Meter;
// The thread that produces all spooled records asynchronously.
class SpoolThread extends Thread {
private static final Logger log = LoggerFactory.getLogger(SpoolThread.class);
private final Meter producerSuccessMeter;
private final Meter producerFailureMeter;
// Records are appended to the queue channel from the outward facing threads
// and processed by this spool thread.
private final FileChannel queueChannel;
// Records are appended to the retry channel and later dequeued and enqueued
// back into the queue channel by this spool thread. Records are repeatedly
// tried until either successfully produced to the kafka cluster or if it gets
// preserved in the error channel.
private final FileChannel retryChannel;
// Records are appended to the error channel by this spool thread and no
// longer touched. Records can be revived by an external admin thread and
// re-appended to the queue channel.
private final FileChannel errorChannel;
private final KafkaProducer<byte[], byte[]> producer;
private final AtomicInteger consecutiveFailures;
private final int batchSize;
private final int batchTime;
private final int retryAttempts;
private final int retryBackoff;
private final int retryBatch;
// Admin thread can update this value to alter the error batch size to control
// how many records to revive from the error channel. Each time this value is
// set to a positive value, the spool thread will revive the specified amount
// of records and then reset it back to zero.
private final AtomicInteger errorBatch = new AtomicInteger(0);
// Admin thread can update this to prevent the spool thread from producing
// records until the specified timestamp.
private final AtomicLong queueResume = new AtomicLong(0);
// Creates, configures and starts a durable flume FileChannel rooted at
// channelPath. Dual checkpointing is enabled and per-transaction fsync is
// disabled (corrupted events get skipped) to favor throughput; the
// transaction capacity matches the spool batch size.
private static FileChannel initChannel(File channelPath, int batchSize) {
    channelPath.mkdirs();

    Context ctx = new Context();
    // 256 MiB data files.
    ctx.put(FileChannelConfiguration.MAX_FILE_SIZE, String.valueOf(268435456));
    ctx.put(FileChannelConfiguration.CHECKPOINT_DIR,
        new File(channelPath, "checkpoint").getAbsolutePath());
    ctx.put(FileChannelConfiguration.USE_DUAL_CHECKPOINTS, String.valueOf(true));
    ctx.put(FileChannelConfiguration.BACKUP_CHECKPOINT_DIR,
        new File(channelPath, "checkpoint.backup").getAbsolutePath());
    ctx.put(FileChannelConfiguration.DATA_DIRS,
        new File(channelPath, "data").getAbsolutePath());
    // Configure to skip corrupted events instead of fsync-ing every txn.
    ctx.put(FileChannelConfiguration.FSYNC_PER_TXN, String.valueOf(false));
    // A much larger grace period to allow the disk to finish writing.
    // http://flume.apache.org/releases/content/1.5.0/apidocs/constant-values.html#org.apache.flume.channel.file.FileChannelConfiguration.DEFAULT_KEEP_ALIVE
    // https://github.com/apache/flume/blob/trunk/flume-ng-channels/flume-file-channel/src/main/java/org/apache/flume/channel/file/FileChannel.java#L465
    ctx.put(FileChannelConfiguration.KEEP_ALIVE, String.valueOf(30));
    ctx.put(FileChannelConfiguration.TRANSACTION_CAPACITY, String.valueOf(batchSize));

    FileChannel channel = new FileChannel();
    channel.setName(channelPath.getAbsolutePath());
    Configurables.configure(channel, ctx);
    channel.start();
    return channel;
}
// Builds a spool shard backed by three durable file channels (queue, retry,
// error) under basePath. The thread name is set to basePath so the shard is
// identifiable in logs and via getInfo().
public SpoolThread(String basePath, KafkaProducer<byte[], byte[]> producer,
                   Meter producerSuccessMeter, Meter producerFailureMeter,
                   AtomicInteger consecutiveFailures, int batchSize, int batchTime,
                   int retryAttempts, int retryBackoff, int retryBatch) {
  super(basePath);
  // One flume FileChannel per spool stage; see the field comments above for
  // how records flow between them.
  this.queueChannel = initChannel(new File(basePath, SpoolChannel.queue.toString()), batchSize);
  this.retryChannel = initChannel(new File(basePath, SpoolChannel.retry.toString()), batchSize);
  this.errorChannel = initChannel(new File(basePath, SpoolChannel.error.toString()), batchSize);
  this.producer = producer;
  this.producerSuccessMeter = producerSuccessMeter;
  this.producerFailureMeter = producerFailureMeter;
  this.consecutiveFailures = consecutiveFailures;
  this.batchSize = batchSize;
  this.batchTime = batchTime;
  // Negative configuration values are clamped to "no retries".
  this.retryAttempts = Math.max(retryAttempts, 0);
  this.retryBackoff = retryBackoff;
  this.retryBatch = retryBatch;
}
// Returns a snapshot of this shard's identity (its thread name) and the
// timestamp until which the queue channel is suspended.
public SpoolShard getInfo() {
  return new SpoolShard(getName(), queueResume.get());
}
// Durably appends one record to the queue channel inside its own flume
// transaction. On failure the transaction is rolled back and the exception
// is propagated to the caller; the transaction is always closed.
public void spoolRecord(SpoolRecord record) throws Exception {
  Transaction tx = queueChannel.getTransaction();
  tx.begin();
  try {
    record.put(queueChannel);
    tx.commit();
  } catch (Exception e) {
    tx.rollback();
    throw e;
  } finally {
    tx.close();
  }
}
// Admin hook: pauses draining of the queue channel until the given wall-clock
// timestamp (millis). The spool thread polls this value in its run loop.
public void suspendQueuedRecords(long timestamp) {
  // Signal for the thread to not process any records in the queue channel
  // until the specified timestamp.
  queueResume.set(timestamp);
}
// Returns up to `count` records from the head of the error channel WITHOUT
// consuming them: the takes happen inside a transaction that is always rolled
// back, so the channel is left untouched.
public ArrayList<SpoolMessage> peekErroredRecords(int count) throws Exception {
  ArrayList<SpoolMessage> messages = new ArrayList<SpoolMessage>();
  Transaction tx = errorChannel.getTransaction();
  tx.begin();
  try {
    int remaining = count;
    while (remaining-- > 0) {
      SpoolRecord rec = SpoolRecord.take(errorChannel);
      if (rec == null) {
        // Channel drained before the requested count was reached.
        break;
      }
      messages.add(new SpoolMessage(rec.attempt, rec.timestamp, rec.payload.topic(),
          rec.payload.key(), rec.payload.value()));
    }
  } finally {
    // Rollback on purpose: this is a peek, not a consume.
    tx.rollback();
    tx.close();
  }
  return messages;
}
// Admin hook: asks the spool thread to move up to `count` records from the
// error channel back into the queue channel on its next retry interval.
public void reviveErroredRecords(int count) {
  // Signal for the next retry interval to also revive some records from the
  // error channel.
  errorBatch.set(count);
}
// This is a helper method to construct the producer callback for one record.
// On success it counts down the batch latch; on failure it preserves the
// record into the retry channel (retriable errors, while attempts remain) or
// the error channel (non-retriable errors or exhausted attempts) before
// counting down. If preservation itself fails, the latch is NOT decremented,
// deliberately stalling the spool thread so the open queue transaction is
// never committed (no data loss).
private Callback getCallback(final CountDownLatch latch, final SpoolRecord record) {
  return new Callback() {
    // Appends the failed record to the given channel in its own transaction.
    private void preserveRecord(FileChannel channel) throws Exception {
      Transaction transaction = channel.getTransaction();
      transaction.begin();
      try {
        record.put(channel);
        transaction.commit();
      } catch (Exception e) {
        log.warn("Cannot preserve record: ", e);
        transaction.rollback();
        throw e;
      } finally {
        transaction.close();
      }
    }

    public void onCompletion(RecordMetadata metadata, Exception ex) {
      if (ex == null) {
        log.trace("Produced record: ", metadata);
        producerSuccessMeter.mark();
        consecutiveFailures.set(0);
        latch.countDown();
      } else {
        log.trace("Failed to produce record: ", metadata);
        producerFailureMeter.mark();
        consecutiveFailures.incrementAndGet();
        try {
          // Retriable errors keep cycling through the retry channel until the
          // attempt count hits a multiple of (retryAttempts + 1), at which
          // point the record is parked in the error channel instead.
          if (ex instanceof RetriableException &&
              (record.attempt % (retryAttempts + 1)) != 0) {
            log.warn("Cannot produce record: ", ex);
            try {
              preserveRecord(retryChannel);
            } catch (Exception e) {
              log.error("Cannot spool to retry channel: ", e);
              // NOTE: It is not good to reach this point, but lets preserve
              // the record to the error channel so the spool thread can
              // continue to process other records.
              preserveRecord(errorChannel);
            }
          } else {
            log.error("Cannot produce record: ", ex);
            log.trace("Cannot produce record: ", record);
            // NOTE: Some non-retriable condition occured, lets preserve the
            // record to the error channel so the thread can continue to
            // process other records.
            preserveRecord(errorChannel);
          }
          log.trace("Preserved record: ", metadata);
          latch.countDown();
        } catch (Exception e) {
          // Swallow exception. Because the latch is not decremented, this
          // spool thread will block on the irrecoverable error. The queue
          // transaction will not commit and therefore cause data loss.
          log.error("Cannot preserve record: ", e);
        }
      }
    }
  };
}
// Moves up to `count` records from sourceChannel back into the queue channel,
// in batches of at most batchSize per paired transaction. Only records whose
// (timestamp / retryBackoff) bucket is <= tsBucket are eligible; the first
// ineligible or missing record ends the transfer.
//
// BUG FIX: the loop-termination flags were inverted. Previously
// `done = count > 0` meant any positive count transferred nothing at all, and
// `done = --count > 0` would stop after a single record while budget remained.
// Both now terminate only once the budget is exhausted (count reaches zero).
//
// NOTE(review): a non-null record that fails the bucket test is still removed
// from sourceChannel by the commit below without being re-queued — confirm
// whether it should be rolled back or re-put instead.
private void enqueueRecords(FileChannel sourceChannel, int count, long tsBucket) {
  boolean done = count <= 0;
  while (!done) {
    Transaction queueTransaction = queueChannel.getTransaction();
    Transaction sourceTransaction = sourceChannel.getTransaction();
    queueTransaction.begin();
    sourceTransaction.begin();
    try {
      for (int i = 0; i < batchSize && !done; ++i) {
        SpoolRecord record = SpoolRecord.take(sourceChannel);
        if (record != null && record.timestamp / retryBackoff <= tsBucket) {
          record.put(queueChannel);
          done = --count <= 0;
        } else {
          done = true;
        }
      }
      queueTransaction.commit();
      sourceTransaction.commit();
    } catch (Exception e) {
      // Was silently swallowed before; at least record the failure.
      log.warn("Cannot enqueue records: ", e);
      sourceTransaction.rollback();
      queueTransaction.rollback();
    } finally {
      sourceTransaction.close();
      queueTransaction.close();
    }
  }
}
/**
 * Spool worker loop: drains records from the queue channel in transactional
 * batches, hands each record to the asynchronous producer, and on a retry
 * time boundary moves records from the retry/error channels back into the
 * queue channel. Interruption is deferred so the in-flight batch can finish
 * cleanly before the channels are stopped. Delivery is at-least-once: a
 * failure between producing and committing replays the batch, but no record
 * is lost.
 */
public void run() {
    log.trace("Started thread for " + queueChannel.getName());
    // Process records from queue channel until this thread is signaled to
    // terminate.
    boolean terminate = false;
    long tsBucket = 0;
    while (!terminate && !Thread.interrupted()) {
        // Get the timestamp of this batch.
        long ts = System.currentTimeMillis();
        // Check if the thread is supposed to resume draining the queue channel.
        if (ts < queueResume.get()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                terminate = true;
            }
            continue;
        }
        // Set up the expected number of records in this batch.
        CountDownLatch latch = new CountDownLatch(batchSize);
        Transaction queueTransaction = queueChannel.getTransaction();
        queueTransaction.begin();
        try {
            // NOTE: The following achieves a batching behavior while periodically
            // updating the progress across the queue and error channels. If at any
            // point in the following for-loop an exception were to occur, the
            // entire batch will rollback in the queue channel and retried by the
            // subsequent iteration.
            for (int i = 0; i < batchSize; ++i) {
                // The following takes a record from the queue channel and tries to
                // produce to the kafka cluster. The queue transaction is kept open
                // for the entire batch until the latch is fully drained by each of
                // the corresponding callbacks.
                SpoolRecord record = SpoolRecord.take(queueChannel);
                if (record != null) {
                    // NOTE: Any unexpected failures from this point on will cause the
                    // inflight records to potentially be produced "at least once".
                    // However, it will guarantee no data loss.
                    producer.send(record.payload, getCallback(latch, record));
                    // If this batch takes too long to fill, cut it short and continue
                    // processing in the next batch. This is for preventing the queue
                    // transaction from being kept open too long due to inactivity.
                    if (System.currentTimeMillis() - ts >= batchTime) {
                        // The following will also indirectly terminate the for-loop.
                        // Pre-increment: slot i was filled by the record just sent, so
                        // its producer callback (not this loop) counts it down.
                        while (++i < batchSize) {
                            latch.countDown();
                        }
                    }
                } else {
                    // No more records currently in the queue channel. Sleep for a
                    // little while and continue as new batch.
                    long ms = batchTime - (System.currentTimeMillis() - ts);
                    if (ms > 0) {
                        try {
                            // The amount of time to sleep is arbitrary. We can make this a
                            // configurable property, but a not well picked value can cause
                            // significant delays. We might as well simply sleep until the
                            // max batch time while capped at 1 second.
                            Thread.sleep(Math.min(1000, ms));
                        } catch (InterruptedException e) {
                            // Delay the interruption and attempt to perform a clean thread
                            // termination.
                            terminate = true;
                        }
                    }
                    // Let's start a new batch so we can avoid the queue transaction
                    // being opened too long. The following will also indirectly
                    // terminate the for-loop. Post-increment: slot i was NOT filled
                    // (no record was taken), so it is counted down here as well.
                    while (i++ < batchSize) {
                        latch.countDown();
                    }
                }
            }
            // The batch of records are underway. Wait for the async producer to
            // acknowledge all inflight records have either been successfully
            // produced to the kafka cluster, or inserted into the retry channel
            // upon failures.
            try {
                if (terminate) {
                    // The spool thread has already been signaled to terminate. Let's
                    // spend as much time as possible to drain the records or until the
                    // main process terminates this thread.
                    latch.await();
                } else if (!latch.await(Math.max(1000L, batchTime * 100), TimeUnit.MILLISECONDS)) {
                    // The amount of time to block before deciding to terminate the
                    // thread due to some very bad condition is arbitrary. Picking 100
                    // times the max batch time sounds reasonably high, yet won't be
                    // forever.
                    throw new Exception("Stuck waiting for producers");
                }
            } catch (InterruptedException e) {
                // Delay the interruption and attempt to continue the clean thread
                // termination logic.
                terminate = true;
            }
            // NOTE: All records in this batch either have successfully reached the
            // kafka cluster or have been persisted into the retry channel. Commit
            // the queue transaction to reflect these messages have been completed.
            // If an unexpected failure occurs before the commit, the records in
            // this batch will be processed again by the subsequent iteration.
            // Duplicate records may end up in the kafka cluster, but no data loss
            // will occur.
            queueTransaction.commit();
        } catch (Exception e) {
            // Something has gone terribly wrong. Rollback and fail fast by bubbling
            // up the error to prevent processing any further records.
            log.error("Unexpected error: ", e);
            queueTransaction.rollback();
            terminate = true;
        } finally {
            queueTransaction.close();
        }
        if (!terminate && ts / retryBackoff > tsBucket) {
            // We entered the next timestamp bucket based on the retry interval and
            // need to move some records from the retry channel and possibly the
            // error channel back to the queue channel.
            tsBucket = ts / retryBackoff;
            // Enqueue records from the retry channel older than this time bucket up
            // to the specified number of records.
            enqueueRecords(retryChannel, retryBatch, tsBucket);
            // Enqueue records from the error channel up to the specified number of
            // records.
            enqueueRecords(errorChannel, errorBatch.getAndSet(0), Long.MAX_VALUE);
        }
    }
    // The spool thread is about to terminate. Clean up the channels.
    queueChannel.stop();
    retryChannel.stop();
    errorChannel.stop();
    log.trace("Stopped thread for " + queueChannel.getName());
}
}
| apache-2.0 |
dylanswartz/nakamura | bundles/solr/src/main/java/org/sakaiproject/nakamura/solr/SolrServerServiceImpl.java | 4100 | package org.sakaiproject.nakamura.solr;
import java.io.IOException;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Modified;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.ReferencePolicy;
import org.apache.felix.scr.annotations.ReferenceStrategy;
import org.apache.felix.scr.annotations.References;
import org.apache.felix.scr.annotations.Service;
import org.apache.solr.client.solrj.SolrServer;
import org.sakaiproject.nakamura.api.solr.SolrClient;
import org.sakaiproject.nakamura.api.solr.SolrServerService;
import org.xml.sax.SAXException;
import com.google.common.collect.Maps;
/**
 * Selects the active {@link SolrClient} implementation based on the
 * {@code solr-impl} configuration property and exposes it through the
 * {@link SolrServerService} API. Candidate client implementations are
 * bound/unbound via the SCR references declared below and indexed by their
 * name (see {@link #bind(SolrClient)}).
 */
@Component(immediate = true, metatype = true)
@Service(value = SolrServerService.class)
@References(
    value={
    @Reference(target="(client-name=embedded)",name="embeddedClient", cardinality=ReferenceCardinality.MANDATORY_UNARY,policy=ReferencePolicy.STATIC,strategy=ReferenceStrategy.EVENT,bind=SolrServerServiceImpl.BIND,unbind=SolrServerServiceImpl.UNBIND,referenceInterface=SolrClient.class),
    @Reference(target="(client-name=remote)",name="remoteClient", cardinality=ReferenceCardinality.MANDATORY_UNARY,policy=ReferencePolicy.STATIC,strategy=ReferenceStrategy.EVENT,bind=SolrServerServiceImpl.BIND,unbind=SolrServerServiceImpl.UNBIND,referenceInterface=SolrClient.class),
    @Reference(target="(client-name=multi)",name="splitClient", cardinality=ReferenceCardinality.MANDATORY_UNARY,policy=ReferencePolicy.STATIC,strategy=ReferenceStrategy.EVENT,bind=SolrServerServiceImpl.BIND,unbind=SolrServerServiceImpl.UNBIND,referenceInterface=SolrClient.class),
    @Reference(target="(client-name=multiremote)",name="multiRemoteClient", cardinality=ReferenceCardinality.MANDATORY_UNARY,policy=ReferencePolicy.STATIC,strategy=ReferenceStrategy.EVENT,bind=SolrServerServiceImpl.BIND,unbind=SolrServerServiceImpl.UNBIND,referenceInterface=SolrClient.class),
    @Reference(name="optionalClient", cardinality=ReferenceCardinality.OPTIONAL_MULTIPLE,policy=ReferencePolicy.DYNAMIC,strategy=ReferenceStrategy.EVENT,bind=SolrServerServiceImpl.BIND,unbind=SolrServerServiceImpl.UNBIND,referenceInterface=SolrClient.class)
    })
public class SolrServerServiceImpl implements SolrServerService, SolrClientListener {

  /** Config property naming the client implementation to activate. */
  @Property(value=SolrClient.EMBEDDED, description="embedded|remote|multi|other")
  private static final String SOLR_IMPL = "solr-impl";

  public static final String BIND = "bind";
  public static final String UNBIND = "unbind";

  // Currently enabled client; null until activation, or after disabled().
  private SolrClient server;
  // All bound client implementations, keyed by client name.
  private Map<String, SolrClient> servers = Maps.newConcurrentMap();

  @Activate
  public void activate(Map<String, Object> properties) throws IOException, ParserConfigurationException, SAXException {
    modified(properties);
  }

  /**
   * Switches the active client according to the (possibly changed)
   * configuration. A reconfiguration that keeps the same implementation is a
   * no-op.
   *
   * @throws RuntimeException if the configured implementation is not bound
   */
  @Modified
  public void modified(Map<String, Object> properties) throws IOException, ParserConfigurationException, SAXException {
    String serverImplName = toString(properties.get(SOLR_IMPL), SolrClient.EMBEDDED);
    SolrClient newServer = servers.get(serverImplName);
    if ( newServer == null ) {
      throw new RuntimeException("Cant locate the Solr implementation called "+serverImplName);
    }
    if ( newServer == server ) {
      // BUG FIX: previously a reconfigure that kept the same implementation
      // enabled the client and then immediately disabled that very same
      // client (server == newServer), leaving the service with a disabled
      // active client. An unchanged selection must be a no-op.
      return;
    }
    newServer.enable(this);
    if ( server != null ) {
      server.disable();
    }
    server = newServer;
  }

  // Null-safe String.valueOf with a fallback default.
  private String toString(Object object, String defaultValue) {
    if ( object == null ) {
      return defaultValue;
    }
    return String.valueOf(object);
  }

  /** @return the query server of the active client. */
  public SolrServer getServer() {
    return server.getServer();
  }

  /** @return the update server of the active client. */
  public SolrServer getUpdateServer() {
    return server.getUpdateServer();
  }

  /** @return the Solr home directory of the active client. */
  public String getSolrHome() {
    return server.getSolrHome();
  }

  // SCR bind callback: register a candidate client under its name.
  public void bind(SolrClient client) {
    servers.put(client.getName(), client);
  }

  // SCR unbind callback: deregister a candidate client.
  public void unbind(SolrClient client) {
    servers.remove(client.getName());
  }

  // SolrClientListener callback: the active client shut itself down.
  // NOTE(review): after this, getServer()/getUpdateServer()/getSolrHome()
  // throw NullPointerException until the next modified() call -- verify this
  // is the intended failure mode for callers.
  public void disabled() {
    server = null;
  }
}
| apache-2.0 |
dbrimley/hazelcast | hazelcast/src/test/java/com/hazelcast/concurrent/semaphore/SemaphoreAdvancedTest.java | 5958 | /*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.concurrent.semaphore;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.ISemaphore;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelTest.class})
public class SemaphoreAdvancedTest extends HazelcastTestSupport {

    /** Acquiring on a shut-down instance must fail with IllegalStateException. */
    @Test(expected = IllegalStateException.class, timeout = 30000)
    public void testAcquire_whenInstanceShutdown() throws InterruptedException {
        HazelcastInstance hz = createHazelcastInstance();
        final ISemaphore semaphore = hz.getSemaphore(randomString());
        hz.shutdown();
        semaphore.acquire();
    }

    /**
     * Verifies the permit count stays consistent while cluster members are
     * shut down one by one between acquire/release pairs.
     */
    @Test(timeout = 300000)
    public void testSemaphoreWithFailures() throws InterruptedException {
        final String semaphoreName = randomString();
        final int k = 4;
        final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(k + 1);
        final HazelcastInstance[] instances = factory.newInstances();
        // The semaphore proxy lives on the last instance, which is never shut down.
        final ISemaphore semaphore = instances[k].getSemaphore(semaphoreName);
        int initialPermits = 20;
        semaphore.init(initialPermits);
        for (int i = 0; i < k; i++) {
            // Random permit count in [1, 5].
            int rand = (int) (Math.random() * 5) + 1;
            semaphore.acquire(rand);
            initialPermits -= rand;
            assertEquals(initialPermits, semaphore.availablePermits());
            semaphore.release(rand);
            initialPermits += rand;
            assertEquals(initialPermits, semaphore.availablePermits());
            // Kill a member, then check the permit bookkeeping survives it.
            instances[i].shutdown();
            semaphore.acquire(rand);
            initialPermits -= rand;
            assertEquals(initialPermits, semaphore.availablePermits());
            semaphore.release(rand);
            initialPermits += rand;
            assertEquals(initialPermits, semaphore.availablePermits());
        }
    }

    /**
     * A blocked acquirer must eventually obtain permits released both around
     * a member shutdown and from a member that joined afterwards.
     */
    @Test(timeout = 300000)
    @Ignore(value = "Known issue in operation system. See: https://github.com/hazelcast/hazelcast/issues/11839")
    public void testSemaphoreWithFailuresAndJoin() {
        final String semaphoreName = randomString();
        final TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(3);
        final HazelcastInstance instance1 = factory.newHazelcastInstance();
        final HazelcastInstance instance2 = factory.newHazelcastInstance();
        final ISemaphore semaphore = instance1.getSemaphore(semaphoreName);
        final CountDownLatch countDownLatch = new CountDownLatch(1);
        // Start at zero permits so the acquiring thread blocks.
        assertTrue(semaphore.init(0));
        final Thread thread = new Thread() {
            public void run() {
                for (int i = 0; i < 2; i++) {
                    try {
                        semaphore.acquire();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                countDownLatch.countDown();
            }
        };
        thread.start();
        // One release around a member shutdown, one from a newly joined member.
        instance2.shutdown();
        semaphore.release();
        HazelcastInstance instance3 = factory.newHazelcastInstance();
        ISemaphore semaphore1 = instance3.getSemaphore(semaphoreName);
        semaphore1.release();
        assertOpenEventually(countDownLatch);
    }

    /**
     * Uses a single-permit semaphore as a mutex: two members increment a
     * shared, unsynchronized counter; the final count is only correct if the
     * semaphore actually provided mutual exclusion.
     */
    @Test(timeout = 300000)
    public void testMutex() throws InterruptedException {
        final String semaphoreName = randomString();
        final int threadCount = 2;
        final HazelcastInstance[] instances = createHazelcastInstanceFactory(threadCount).newInstances();
        final CountDownLatch latch = new CountDownLatch(threadCount);
        final int loopCount = 1000;
        // Deliberately non-thread-safe counter guarded only by the semaphore.
        class Counter {
            int count = 0;
            void inc() {
                count++;
            }
            int get() {
                return count;
            }
        }
        final Counter counter = new Counter();
        assertTrue(instances[0].getSemaphore(semaphoreName).init(1));
        for (int i = 0; i < threadCount; i++) {
            final ISemaphore semaphore = instances[i].getSemaphore(semaphoreName);
            new Thread() {
                public void run() {
                    for (int j = 0; j < loopCount; j++) {
                        try {
                            semaphore.acquire();
                            // Hold the permit briefly to widen any race window.
                            sleepMillis((int) (Math.random() * 3));
                            counter.inc();
                        } catch (InterruptedException e) {
                            return;
                        } finally {
                            semaphore.release();
                        }
                    }
                    latch.countDown();
                }
            }.start();
        }
        assertOpenEventually(latch);
        assertEquals(loopCount * threadCount, counter.get());
    }
}
| apache-2.0 |
DSttr/SystemUI | SystemUI/src/com/android/systemui/qs/tiles/AmbientDisplayTile.java | 3530 | /*
* Copyright (C) 2015 The CyanogenMod Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.qs.tiles;
import android.content.Context;
import android.content.Intent;
import android.provider.Settings;
import android.provider.Settings.Secure;
import com.android.systemui.qs.SecureSetting;
import com.android.systemui.qs.QSTile;
import com.android.systemui.R;
import org.cyanogenmod.internal.logging.CMMetricsLogger;
/**
 * Quick settings tile toggling Ambient Display (the {@code DOZE_ENABLED}
 * secure setting). Tap toggles the setting; long-press opens display
 * settings.
 */
public class AmbientDisplayTile extends QSTile<QSTile.BooleanState> {

    // Long-press target: the system display settings screen.
    private static final Intent DISPLAY_SETTINGS = new Intent("android.settings.DISPLAY_SETTINGS");

    // Observer/accessor for Settings.Secure.DOZE_ENABLED.
    private final SecureSetting mSetting;

    public AmbientDisplayTile(Host host) {
        super(host);
        mSetting = new SecureSetting(mContext, mHandler, Secure.DOZE_ENABLED) {
            @Override
            protected void handleValueChanged(int value, boolean observedChange) {
                // Push the new setting value into the tile state.
                handleRefreshState(value);
            }
        };
    }

    @Override
    protected BooleanState newTileState() {
        return new BooleanState();
    }

    // Tap toggles ambient display on/off.
    @Override
    protected void handleClick() {
        setEnabled(!mState.value);
        refreshState();
    }

    // Long-press opens the display settings screen.
    @Override
    protected void handleLongClick() {
        mHost.startActivityDismissingKeyguard(DISPLAY_SETTINGS);
    }

    // Writes the doze toggle to secure settings (1 = enabled, 0 = disabled).
    private void setEnabled(boolean enabled) {
        Settings.Secure.putInt(mContext.getContentResolver(),
                Settings.Secure.DOZE_ENABLED,
                enabled ? 1 : 0);
    }

    @Override
    protected void handleUpdateState(BooleanState state, Object arg) {
        // arg carries the new value when called from the setting observer;
        // otherwise read the current value directly from the setting.
        final int value = arg instanceof Integer ? (Integer)arg : mSetting.getValue();
        final boolean enable = value != 0;
        state.value = enable;
        state.visible = true;
        state.label = mContext.getString(R.string.quick_settings_ambient_display_label);
        if (enable) {
            state.icon = ResourceIcon.get(R.drawable.ic_qs_ambientdisplay_on);
            state.contentDescription = mContext.getString(
                    R.string.accessibility_quick_settings_ambient_display_on);
        } else {
            state.icon = ResourceIcon.get(R.drawable.ic_qs_ambientdisplay_off);
            state.contentDescription = mContext.getString(
                    R.string.accessibility_quick_settings_ambient_display_off);
        }
    }

    @Override
    public int getMetricsCategory() {
        return CMMetricsLogger.TILE_AMBIENT_DISPLAY;
    }

    // Accessibility announcement for the state change just applied.
    @Override
    protected String composeChangeAnnouncement() {
        if (mState.value) {
            return mContext.getString(
                    R.string.accessibility_quick_settings_ambient_display_changed_on);
        } else {
            return mContext.getString(
                    R.string.accessibility_quick_settings_ambient_display_changed_off);
        }
    }

    @Override
    public void setListening(boolean listening) {
        // Do nothing
        // NOTE(review): similar tiles usually forward this to
        // mSetting.setListening(listening). If SecureSetting does not
        // self-register its observer, external changes to DOZE_ENABLED will
        // not refresh this tile -- verify against SecureSetting.
    }
}
FOC-framework/framework | foc/src/com/foc/depricatedUnit/Ton.java | 302 | /*
* Created on 01-Feb-2005
*/
package com.foc.depricatedUnit;
/**
* @author 01Barmaja
*/
public class Ton implements Unit {
public String getTitle() {
return "Ton";
}
public String getName() {
return "T";
}
public int getID() {
return Unit.TON;
}
} | apache-2.0 |
punkhorn/camel-upstream | tooling/apt/src/main/java/org/apache/camel/tools/apt/ConverterProcessor.java | 14658 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.tools.apt;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.annotation.processing.RoundEnvironment;
import javax.annotation.processing.SupportedAnnotationTypes;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.AnnotationValue;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.type.TypeMirror;
import javax.tools.Diagnostic;
import javax.tools.JavaFileObject;
/**
 * Annotation processor that generates the source of
 * {@code org.apache.camel.impl.converter.CoreStaticTypeConverterLoader} from
 * methods annotated with {@code @Converter} and {@code @FallbackConverter},
 * so core type converters are registered statically rather than discovered
 * at runtime.
 */
@SupportedAnnotationTypes({"org.apache.camel.Converter"})
public class ConverterProcessor extends AbstractCamelAnnotationProcessor {

    @Override
    protected void doProcess(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) throws Exception {
        // The loader source already exists; nothing to generate.
        if (this.processingEnv.getElementUtils().getTypeElement("org.apache.camel.impl.converter.CoreStaticTypeConverterLoader") != null) {
            return;
        }
        // We're in tests, do not generate anything
        if (this.processingEnv.getElementUtils().getTypeElement("org.apache.camel.converter.ObjectConverter") == null) {
            return;
        }
        // Orders "from" types so that more specific (assignable) types sort first,
        // falling back to name order for unrelated types.
        Comparator<TypeMirror> comparator = (o1, o2) -> processingEnv.getTypeUtils().isAssignable(o1, o2)
            ? -1 : processingEnv.getTypeUtils().isAssignable(o2, o1) ? +1 : o1.toString().compareTo(o2.toString());
        // "to" type name -> ("from" type -> converter method).
        Map<String, Map<TypeMirror, ExecutableElement>> converters = new TreeMap<>();
        TypeElement converterAnnotationType = this.processingEnv.getElementUtils().getTypeElement("org.apache.camel.Converter");
        // Collect every @Converter method, keyed by its return ("to") type and
        // first-parameter ("from") type.
        for (Element element : roundEnv.getElementsAnnotatedWith(converterAnnotationType)) {
            if (element.getKind() == ElementKind.METHOD) {
                ExecutableElement ee = (ExecutableElement)element;
                TypeMirror to = ee.getReturnType();
                TypeMirror from = ee.getParameters().get(0).asType();
                String fromStr = toString(from);
                // Normalize non-array "from" types to their declared element type
                // (strips generics via toString()).
                if (!fromStr.endsWith("[]")) {
                    TypeElement e = this.processingEnv.getElementUtils().getTypeElement(fromStr);
                    if (e != null) {
                        from = e.asType();
                    } else {
                        processingEnv.getMessager().printMessage(Diagnostic.Kind.NOTE, "Could not retrieve type element for " + fromStr);
                    }
                }
                converters.computeIfAbsent(toString(to), c -> new TreeMap<>(comparator)).put(from, ee);
            }
        }
        // Collect every @FallbackConverter method.
        TypeElement fallbackAnnotationType = this.processingEnv.getElementUtils().getTypeElement("org.apache.camel.FallbackConverter");
        List<ExecutableElement> fallbackConverters = new ArrayList<>();
        for (Element element : roundEnv.getElementsAnnotatedWith(fallbackAnnotationType)) {
            if (element.getKind() == ElementKind.METHOD) {
                ExecutableElement ee = (ExecutableElement)element;
                fallbackConverters.add(ee);
            }
        }
        String p = "org.apache.camel.impl.converter";
        String c = "CoreStaticTypeConverterLoader";
        JavaFileObject jfo = processingEnv.getFiler().createSourceFile(p + "." + c);
        // Holder classes of non-static converter methods; each gets a lazily
        // initialized instance field in the generated loader (see bottom).
        Set<String> converterClasses = new LinkedHashSet<>();
        try (Writer writer = jfo.openWriter()) {
            // --- Generated file header, imports and class declaration. ---
            writer.append("package ").append(p).append(";\n");
            writer.append("\n");
            writer.append("import org.apache.camel.Exchange;\n");
            writer.append("import org.apache.camel.TypeConversionException;\n");
            writer.append("import org.apache.camel.TypeConverterLoaderException;\n");
            writer.append("import org.apache.camel.spi.TypeConverterLoader;\n");
            writer.append("import org.apache.camel.spi.TypeConverterRegistry;\n");
            writer.append("import org.apache.camel.support.TypeConverterSupport;\n");
            writer.append("\n");
            writer.append("@SuppressWarnings(\"unchecked\")\n");
            writer.append("public class ").append(c).append(" implements TypeConverterLoader {\n");
            writer.append("\n");
            writer.append(" public static final CoreStaticTypeConverterLoader INSTANCE = new CoreStaticTypeConverterLoader();\n");
            writer.append("\n");
            // --- Shared SimpleTypeConverter base class in the generated code. ---
            writer.append(" static abstract class SimpleTypeConverter extends TypeConverterSupport {\n");
            writer.append(" private final boolean allowNull;\n");
            writer.append("\n");
            writer.append(" public SimpleTypeConverter(boolean allowNull) {\n");
            writer.append(" this.allowNull = allowNull;\n");
            writer.append(" }\n");
            writer.append("\n");
            writer.append(" @Override\n");
            writer.append(" public boolean allowNull() {\n");
            writer.append(" return allowNull;\n");
            writer.append(" }\n");
            writer.append("\n");
            writer.append(" @Override\n");
            writer.append(" public <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException {\n");
            writer.append(" try {\n");
            writer.append(" return (T) doConvert(exchange, value);\n");
            writer.append(" } catch (TypeConversionException e) {\n");
            writer.append(" throw e;\n");
            writer.append(" } catch (Exception e) {\n");
            writer.append(" throw new TypeConversionException(value, type, e);\n");
            writer.append(" }\n");
            writer.append(" }\n");
            writer.append(" protected abstract Object doConvert(Exchange exchange, Object value) throws Exception;\n");
            writer.append(" };\n");
            writer.append("\n");
            writer.append(" private DoubleMap<Class<?>, Class<?>, SimpleTypeConverter> converters = new DoubleMap<>(256);\n");
            writer.append("\n");
            // --- Constructor: one converters.put(...) per (to, from) pair. ---
            writer.append(" private ").append(c).append("() {\n");
            for (Map.Entry<String, Map<TypeMirror, ExecutableElement>> to : converters.entrySet()) {
                for (Map.Entry<TypeMirror, ExecutableElement> from : to.getValue().entrySet()) {
                    // Read allowNull from the @Converter annotation, if present.
                    boolean allowNull = false;
                    for (AnnotationMirror ann : from.getValue().getAnnotationMirrors()) {
                        if (ann.getAnnotationType().asElement() == converterAnnotationType) {
                            for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : ann.getElementValues().entrySet()) {
                                switch (entry.getKey().getSimpleName().toString()) {
                                    case "allowNull":
                                        allowNull = (Boolean)entry.getValue().getValue();
                                        break;
                                    default:
                                        throw new IllegalStateException();
                                }
                            }
                        }
                    }
                    writer.append(" converters.put(").append(to.getKey()).append(".class").append(", ").append(toString(from.getKey()))
                        .append(".class, new SimpleTypeConverter(").append(Boolean.toString(allowNull)).append(") {\n");
                    writer.append(" @Override\n");
                    writer.append(" public Object doConvert(Exchange exchange, Object value) throws Exception {\n");
                    writer.append(" return ").append(toJava(from.getValue(), converterClasses)).append(";\n");
                    writer.append(" }\n");
                    writer.append(" });\n");
                }
            }
            writer.append(" }\n");
            writer.append("\n");
            // --- load(): register all converters plus each fallback converter. ---
            writer.append(" @Override\n");
            writer.append(" public void load(TypeConverterRegistry registry) throws TypeConverterLoaderException {\n");
            writer.append(" converters.forEach((k, v, c) -> registry.addTypeConverter(k, v, c));\n");
            for (ExecutableElement ee : fallbackConverters) {
                // Read allowNull/canPromote from the @FallbackConverter annotation.
                boolean allowNull = false;
                boolean canPromote = false;
                for (AnnotationMirror ann : ee.getAnnotationMirrors()) {
                    if (ann.getAnnotationType().asElement() == fallbackAnnotationType) {
                        for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : ann.getElementValues().entrySet()) {
                            switch (entry.getKey().getSimpleName().toString()) {
                                case "allowNull":
                                    allowNull = (Boolean)entry.getValue().getValue();
                                    break;
                                case "canPromote":
                                    canPromote = (Boolean)entry.getValue().getValue();
                                    break;
                                default:
                                    throw new IllegalStateException();
                            }
                        }
                    }
                }
                writer.append(" registry.addFallbackTypeConverter(new TypeConverterSupport() {\n");
                writer.append(" @Override\n");
                writer.append(" public boolean allowNull() {\n");
                writer.append(" return ").append(Boolean.toString(allowNull)).append(";\n");
                writer.append(" }\n");
                writer.append(" @Override\n");
                writer.append(" public <T> T convertTo(Class<T> type, Exchange exchange, Object value) throws TypeConversionException {\n");
                writer.append(" try {\n");
                writer.append(" return (T) ").append(toJavaFallback(ee, converterClasses)).append(";\n");
                writer.append(" } catch (TypeConversionException e) {\n");
                writer.append(" throw e;\n");
                writer.append(" } catch (Exception e) {\n");
                writer.append(" throw new TypeConversionException(value, type, e);\n");
                writer.append(" }\n");
                writer.append(" }\n");
                writer.append(" }, ").append(Boolean.toString(canPromote)).append(");\n");
            }
            writer.append(" }\n");
            writer.append("\n");
            // --- Lazily initialized (double-checked) fields for non-static
            // converter holder classes collected above. ---
            for (String f : converterClasses) {
                String s = f.substring(f.lastIndexOf('.') + 1);
                String v = s.substring(0, 1).toLowerCase() + s.substring(1);
                writer.append(" private volatile ").append(f).append(" ").append(v).append(";\n");
                writer.append(" private ").append(f).append(" get").append(s).append("() {\n");
                writer.append(" if (").append(v).append(" == null) {\n");
                writer.append(" synchronized (this) {\n");
                writer.append(" if (").append(v).append(" == null) {\n");
                writer.append(" ").append(v).append(" = new ").append(f).append("();\n");
                writer.append(" }\n");
                writer.append(" }\n");
                writer.append(" }\n");
                writer.append(" return ").append(v).append(";\n");
                writer.append(" }\n");
            }
            writer.append("}\n");
            writer.flush();
        }
    }

    /** Renders a type without generic arguments (e.g. "java.util.List"). */
    private String toString(TypeMirror type) {
        return type.toString().replaceAll("<.*>", "");
    }

    /**
     * Builds the Java expression invoking a regular converter method: a static
     * call for static methods, or a call through the generated lazy holder
     * getter otherwise (recording the holder class in converterClasses).
     */
    private String toJava(ExecutableElement converter, Set<String> converterClasses) {
        String pfx;
        if (converter.getModifiers().contains(Modifier.STATIC)) {
            pfx = converter.getEnclosingElement().toString() + "." + converter.getSimpleName();
        } else {
            converterClasses.add(converter.getEnclosingElement().toString());
            pfx = "get" + converter.getEnclosingElement().getSimpleName() + "()." + converter.getSimpleName();
        }
        String type = toString(converter.getParameters().get(0).asType());
        String cast = type.equals("java.lang.Object") ? "" : "(" + type + ") ";
        // Two-parameter converters also take the Exchange.
        return pfx + "(" + cast + "value" + (converter.getParameters().size() == 2 ? ", exchange" : "") + ")";
    }

    /**
     * Builds the Java expression invoking a fallback converter method; like
     * toJava but with the (type, [exchange,] value, registry) signature.
     */
    private String toJavaFallback(ExecutableElement converter, Set<String> converterClasses) {
        String pfx;
        if (converter.getModifiers().contains(Modifier.STATIC)) {
            pfx = converter.getEnclosingElement().toString() + "." + converter.getSimpleName();
        } else {
            converterClasses.add(converter.getEnclosingElement().toString());
            pfx = "get" + converter.getEnclosingElement().getSimpleName() + "()." + converter.getSimpleName();
        }
        // The value parameter is the second-to-last one (registry is last).
        String type = toString(converter.getParameters().get(converter.getParameters().size() - 2).asType());
        String cast = type.equals("java.lang.Object") ? "" : "(" + type + ") ";
        return pfx + "(type, " + (converter.getParameters().size() == 4 ? "exchange, " : "") + cast + "value" + ", registry)";
    }
}
| apache-2.0 |
nagyistoce/camunda-dmn-model | src/main/java/org/camunda/bpm/model/dmn/impl/DescriptionImpl.java | 1749 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.model.dmn.impl;
import static org.camunda.bpm.model.dmn.impl.DmnModelConstants.DMN10_NS;
import static org.camunda.bpm.model.dmn.impl.DmnModelConstants.DMN_ELEMENT_DESCRIPTION;
import org.camunda.bpm.model.dmn.instance.Description;
import org.camunda.bpm.model.xml.ModelBuilder;
import org.camunda.bpm.model.xml.impl.instance.ModelTypeInstanceContext;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder;
import org.camunda.bpm.model.xml.type.ModelElementTypeBuilder.ModelTypeInstanceProvider;
/**
 * DMN model element implementation backing the {@code description} element.
 */
public class DescriptionImpl extends DmnModelElementInstanceImpl implements Description {

  public DescriptionImpl(ModelTypeInstanceContext instanceContext) {
    super(instanceContext);
  }

  /**
   * Registers the {@code description} element type and its instance factory
   * with the given model builder.
   */
  public static void registerType(ModelBuilder modelBuilder) {
    ModelTypeInstanceProvider<Description> instanceProvider =
        new ModelTypeInstanceProvider<Description>() {
          public Description newInstance(ModelTypeInstanceContext instanceContext) {
            return new DescriptionImpl(instanceContext);
          }
        };
    modelBuilder.defineType(Description.class, DMN_ELEMENT_DESCRIPTION)
        .namespaceUri(DMN10_NS)
        .instanceProvider(instanceProvider)
        .build();
  }
}
| apache-2.0 |
ErnestasMitkus/Wilded | wilded-graphics/src/main/java/com/ernestas/terrains/Terrain.java | 6464 | package com.ernestas.terrains;
import com.ernestas.models.RawModel;
import com.ernestas.renderEngine.Loader;
import com.ernestas.textures.ModelTexture;
import com.ernestas.textures.TerrainTexture;
import com.ernestas.textures.TerrainTexturePack;
import com.ernestas.toolbox.Maths;
import org.lwjgl.util.vector.Vector2f;
import org.lwjgl.util.vector.Vector3f;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.List;
public class Terrain {
public static final float SIZE = 800;
private static final float MAX_HEIGHT = 40;
private static final float MAX_PIXEL_COLOUR = 256 * 256 * 256;
private float x;
private float z;
private RawModel model;
private TerrainTexturePack texturePack;
private TerrainTexture blendMap;
private float[][] heights;
/**
 * Builds a terrain tile positioned at grid cell (gridX, gridZ) in world
 * space (each cell spans SIZE units).
 *
 * @param loader      uploads the generated mesh data into a VAO
 * @param texturePack ground textures blended over the terrain
 * @param blendMap    texture controlling how the pack textures are mixed
 * @param heightMap   name of a PNG under "textures/" encoding vertex heights
 */
public Terrain(int gridX, int gridZ, Loader loader, TerrainTexturePack texturePack, TerrainTexture blendMap,
               String heightMap) {
    this.texturePack = texturePack;
    this.blendMap = blendMap;
    this.x = gridX * SIZE;
    this.z = gridZ * SIZE;
    this.model = generateTerrain(loader, heightMap);
}
/** @return the ground texture pack blended over this terrain. */
public TerrainTexturePack getTexturePack() {
    return texturePack;
}

/** @return the blend map controlling the texture-pack mix. */
public TerrainTexture getBlendMap() {
    return blendMap;
}

/** @return the world-space X of this tile's origin corner. */
public float getX() {
    return x;
}

/** @return the world-space Z of this tile's origin corner. */
public float getZ() {
    return z;
}

/** @return the generated terrain mesh. */
public RawModel getModel() {
    return model;
}
/**
 * Returns the terrain height (world Y) directly under the given world-space
 * X/Z position, or 0 when the position lies outside this tile.
 */
public float getHeightOfTerrain(float worldX, float worldZ) {
    // Convert the position to coordinates local to this tile.
    float localX = worldX - this.x;
    float localZ = worldZ - this.z;
    float squareSize = SIZE / ((float) heights.length - 1);
    int cellX = (int) Math.floor(localX / squareSize);
    int cellZ = (int) Math.floor(localZ / squareSize);
    boolean outside = cellX < 0 || cellZ < 0
            || cellX >= heights.length - 1 || cellZ >= heights.length - 1;
    if (outside) {
        return 0;
    }
    // Position inside the grid square, each axis normalised to [0; 1].
    float u = (localX % squareSize) / squareSize;
    float v = (localZ % squareSize) / squareSize;
    // Each square splits into two triangles along its diagonal (u + v = 1);
    // interpolate within whichever triangle contains the point.
    if (u <= (1 - v)) {
        return Maths.barryCentric(
                new Vector3f(0, heights[cellX][cellZ], 0),
                new Vector3f(1, heights[cellX + 1][cellZ], 0),
                new Vector3f(0, heights[cellX][cellZ + 1], 1),
                new Vector2f(u, v));
    }
    return Maths.barryCentric(
            new Vector3f(1, heights[cellX + 1][cellZ], 0),
            new Vector3f(1, heights[cellX + 1][cellZ + 1], 1),
            new Vector3f(0, heights[cellX][cellZ + 1], 1),
            new Vector2f(u, v));
}
/**
 * Generates the terrain mesh from a height-map image.
 *
 * Loads "textures/&lt;heightMap&gt;.png", samples one vertex per pixel (the
 * map is assumed square; the vertex count is taken from the image height),
 * derives per-vertex normals from neighbouring heights, and uploads
 * positions, texture coordinates, normals and triangle indices to a VAO.
 *
 * @throws IllegalStateException if the height map cannot be read
 */
private RawModel generateTerrain(Loader loader, String heightMap){
    BufferedImage image;
    try {
        image = ImageIO.read(new File("textures/" + heightMap + ".png"));
    } catch (IOException e) {
        // Fail fast with context instead of letting the swallowed exception
        // surface a few lines later as a bare NullPointerException.
        throw new IllegalStateException(
                "Unable to read height map: textures/" + heightMap + ".png", e);
    }
    int VERTEX_COUNT = image.getHeight();
    heights = new float[VERTEX_COUNT][VERTEX_COUNT];
    int count = VERTEX_COUNT * VERTEX_COUNT;
    float[] vertices = new float[count * 3];
    float[] normals = new float[count * 3];
    float[] textureCoords = new float[count*2];
    // BUG FIX: the index array was sized 6*(VERTEX_COUNT-1)*(VERTEX_COUNT*1),
    // i.e. an extra row of unused zero-filled indices which render as
    // degenerate triangles at vertex 0. A (VERTEX_COUNT-1) x (VERTEX_COUNT-1)
    // grid of quads needs exactly 6 indices per quad.
    int[] indices = new int[6*(VERTEX_COUNT-1)*(VERTEX_COUNT-1)];
    int vertexPointer = 0;
    // One vertex per height-map pixel.
    for(int i=0;i<VERTEX_COUNT;i++){
        for(int j=0;j<VERTEX_COUNT;j++){
            float height = getHeight(j, i, image);
            heights[j][i] = height;
            vertices[vertexPointer*3] = (float)j/((float)VERTEX_COUNT - 1) * SIZE;
            vertices[vertexPointer*3+1] = height;
            vertices[vertexPointer*3+2] = (float)i/((float)VERTEX_COUNT - 1) * SIZE;
            Vector3f normal = calculateNormal(j, i, image);
            normals[vertexPointer*3] = normal.x;
            normals[vertexPointer*3+1] = normal.y;
            normals[vertexPointer*3+2] = normal.z;
            textureCoords[vertexPointer*2] = (float)j/((float)VERTEX_COUNT - 1);
            textureCoords[vertexPointer*2+1] = (float)i/((float)VERTEX_COUNT - 1);
            vertexPointer++;
        }
    }
    // Two triangles per grid square.
    int pointer = 0;
    for(int gz=0;gz<VERTEX_COUNT-1;gz++){
        for(int gx=0;gx<VERTEX_COUNT-1;gx++){
            int topLeft = (gz*VERTEX_COUNT)+gx;
            int topRight = topLeft + 1;
            int bottomLeft = ((gz+1)*VERTEX_COUNT)+gx;
            int bottomRight = bottomLeft + 1;
            indices[pointer++] = topLeft;
            indices[pointer++] = bottomLeft;
            indices[pointer++] = topRight;
            indices[pointer++] = topRight;
            indices[pointer++] = bottomLeft;
            indices[pointer++] = bottomRight;
        }
    }
    return loader.loadToVAO(vertices, textureCoords, normals, indices);
}
/**
 * Approximates the surface normal at a height-map cell using central
 * differences of the four neighbouring heights (left/right for X slope,
 * down/up for Z slope).
 */
private Vector3f calculateNormal(int x, int z, BufferedImage image) {
    float heightL = getHeight(x - 1, z, image);
    float heightR = getHeight(x + 1, z, image);
    float heightD = getHeight(x, z - 1, image);
    float heightU = getHeight(x, z + 1, image);
    // The fixed Y component (2f) dampens how strongly slopes tilt the normal.
    Vector3f normal = new Vector3f(heightL - heightR, 2f, heightD - heightU);
    normal.normalise();
    return normal;
}
/**
 * Reads the terrain height encoded in one height-map pixel.
 *
 * @param x pixel column
 * @param y pixel row (maps to the terrain's Z axis)
 * @return decoded height, or 0 for out-of-range coordinates (used when
 *         sampling neighbours at the edges for normal calculation)
 */
private float getHeight(int x, int y, BufferedImage image) {
    // NOTE(review): both bounds check getHeight() — assumes a square image; confirm.
    if (x < 0 || x >= image.getHeight() ||
        y < 0 || y >= image.getHeight()) {
        return 0;
    }
    // getRGB returns packed ARGB as a signed int (negative for opaque pixels);
    // the shift/scale below maps it into a signed height range — presumably
    // [-MAX_HEIGHT, MAX_HEIGHT] assuming MAX_PIXEL_COLOUR is 256^3; TODO confirm
    // the constants, which are declared outside this view.
    float height = image.getRGB(x, y);
    height += MAX_PIXEL_COLOUR / 2f;
    height /= MAX_PIXEL_COLOUR / 2f;
    height *= MAX_HEIGHT;
    return height;
}
/**
 * Finds the terrain tile that contains the given world position.
 *
 * @param terrains candidate tiles
 * @param worldX   world-space X coordinate
 * @param worldZ   world-space Z coordinate
 * @return the tile whose origin matches the tile-grid cell of the position,
 *         or {@code null} when no tile in the list covers it
 */
public static Terrain getCurrentTerrain(List<Terrain> terrains, float worldX, float worldZ) {
    // Snap the position down to the origin corner of its tile-grid cell.
    float originX = (int) Math.floor(worldX / Terrain.SIZE) * Terrain.SIZE;
    float originZ = (int) Math.floor(worldZ / Terrain.SIZE) * Terrain.SIZE;
    for (Terrain candidate : terrains) {
        if (candidate.getX() == originX && candidate.getZ() == originZ) {
            return candidate;
        }
    }
    return null;
}
}
| apache-2.0 |
xsonorg/xson | src/main/java/org/xson/core/serializer/CollectionSerializer.java | 1021 | package org.xson.core.serializer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import org.xson.core.WriterModel;
/**
 * Serializer for {@link Collection} instances: records the collection's concrete
 * class, writes every element in iteration order, then appends an end marker.
 */
public class CollectionSerializer extends DefaultSerializer {

    public final static CollectionSerializer instance = new CollectionSerializer();

    @Override
    public void write(Object target, WriterModel model) {
        model.appendCreateUserObject(target.getClass());
        // ArrayList is walked by index (cheap random access); every other
        // Collection implementation goes through its iterator.
        if (target instanceof ArrayList) {
            arrayListWrite(target, model);
        } else {
            collectionWrite(target, model);
        }
        model.writeEnd();
    }

    // Index-based element dump for ArrayList.
    private void arrayListWrite(Object target, WriterModel model) {
        ArrayList<?> elements = (ArrayList<?>) target;
        for (int index = 0, total = elements.size(); index < total; index++) {
            model.writeObject(elements.get(index));
        }
    }

    // Iterator-based element dump for any other Collection.
    private void collectionWrite(Object target, WriterModel model) {
        for (Object element : (Collection<?>) target) {
            model.writeObject(element);
        }
    }
}
| apache-2.0 |
litkowiecdamian/java_course | soap-sample/src/main/java/net/webservicex/GeoIPServiceHttpPost.java | 1271 | package net.webservicex;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebResult;
import javax.jws.WebService;
import javax.jws.soap.SOAPBinding;
import javax.xml.bind.annotation.XmlSeeAlso;
/**
* This class was generated by Apache CXF 3.1.12
* 2017-07-13T10:51:06.430+02:00
* Generated source version: 3.1.12
*
*/
@WebService(targetNamespace = "http://www.webservicex.net/", name = "GeoIPServiceHttpPost")
@XmlSeeAlso({ObjectFactory.class})
@SOAPBinding(parameterStyle = SOAPBinding.ParameterStyle.BARE)
public interface GeoIPServiceHttpPost {

    /**
     * GeoIPService - GetGeoIP enables you to easily look up countries by IP addresses.
     *
     * @param ipAddress the IP address to resolve
     * @return the country record for the given address
     */
    @WebMethod(operationName = "GetGeoIP")
    @WebResult(name = "GeoIP", targetNamespace = "http://www.webservicex.net/", partName = "Body")
    public GeoIP getGeoIP(
        @WebParam(partName = "IPAddress", name = "IPAddress", targetNamespace = "")
        java.lang.String ipAddress
    );

    /**
     * GeoIPService - GetGeoIPContext enables you to easily look up countries by Context
     * (presumably the caller's own address as seen by the service — confirm
     * against the service documentation).
     */
    @WebMethod(operationName = "GetGeoIPContext")
    @WebResult(name = "GeoIP", targetNamespace = "http://www.webservicex.net/", partName = "Body")
    public GeoIP getGeoIPContext();
}
| apache-2.0 |
GoogleCloudDataproc/hadoop-connectors | util/src/main/java/com/google/cloud/hadoop/util/RedactedString.java | 1272 | /*
* Copyright 2020 Google LLC. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.hadoop.util;
import static com.google.common.base.Strings.isNullOrEmpty;
import com.google.auto.value.AutoValue;
import javax.annotation.Nullable;
/**
* Holder class for string values that should not be logged and displayed when {@code toString}
* method called. For example, it should be used for credentials.
*/
@AutoValue
@AutoValue
public abstract class RedactedString {

    /**
     * Wraps the given sensitive value. Returns {@code null} when the value is
     * null or empty, so callers can treat "absent" and "empty" uniformly.
     */
    @Nullable
    public static RedactedString create(@Nullable String value) {
        return isNullOrEmpty(value) ? null : new AutoValue_RedactedString(value);
    }

    /** @return the actual (sensitive) value; never exposed by {@link #toString()} */
    public abstract String value();

    /** Masks the value so accidental logging cannot leak it. */
    @Override
    public String toString() {
        return "<redacted>";
    }
}
| apache-2.0 |
alibaba/ARouter | module-java/src/main/java/com/alibaba/android/arouter/demo/module1/TestModule2Activity.java | 470 | package com.alibaba.android.arouter.demo.module1;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import com.alibaba.android.arouter.facade.annotation.Route;
// Demo activity registered with ARouter under group "m2" at path "/module/2";
// it only inflates its layout.
@Route(path = "/module/2", group = "m2")
public class TestModule2Activity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_test_module2);
    }
}
| apache-2.0 |
openflint/Connect-SDK-Android | modules/matchstick/src/com/connectsdk/service/flint/FlintServiceChannel.java | 1917 | /*
* CastServiceChannel
* Connect SDK
*
* Copyright (c) 2014 LG Electronics.
* Created by Hyun Kook Khang on 24 Feb 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.connectsdk.service.flint;
import com.connectsdk.core.Util;
import com.connectsdk.service.sessions.FlintWebAppSession;
import org.json.JSONException;
import org.json.JSONObject;
import tv.matchstick.flint.Flint;
import tv.matchstick.flint.FlintDevice;
/**
 * Receives messages from a Flint (Matchstick) device on the Connect SDK
 * namespace and forwards them to the web-app session's listener on the UI thread.
 */
public class FlintServiceChannel implements Flint.MessageReceivedCallback{
    // Id of the web app this channel belongs to.
    String webAppId;
    // Session whose listener receives forwarded messages.
    FlintWebAppSession session;

    public FlintServiceChannel(String webAppId, FlintWebAppSession session) {
        this.webAppId = webAppId;
        this.session = session;
    }

    /** @return the fixed Connect SDK messaging namespace */
    public String getNamespace() {
        return "urn:x-cast:com.connectsdk";
    }

    /**
     * Delivers an incoming device message to the session listener. If the payload
     * parses as JSON it is delivered as a {@link JSONObject}; otherwise the raw
     * string is delivered unchanged.
     */
    @Override
    public void onMessageReceived(FlintDevice castDevice, String namespace, final String message) {
        // No listener registered: nothing to deliver.
        if (session.getWebAppSessionListener() == null)
            return;
        JSONObject messageJSON = null;
        try {
            messageJSON = new JSONObject(message);
        } catch (JSONException e) { } // not JSON — intentionally fall back to the raw string
        final JSONObject mMessage = messageJSON;
        // Listener callbacks must run on the UI thread.
        Util.runOnUI(new Runnable() {
            @Override
            public void run() {
                if (mMessage == null) {
                    session.getWebAppSessionListener().onReceiveMessage(session, message);
                } else {
                    session.getWebAppSessionListener().onReceiveMessage(session, mMessage);
                }
            }
        });
    }
}
| apache-2.0 |
remibergsma/cosmic | cosmic-core/engine/schema/src/main/java/com/cloud/storage/dao/VMTemplatePoolDao.java | 1593 | package com.cloud.storage.dao;
import com.cloud.engine.subsystem.api.storage.DataObjectInStore;
import com.cloud.engine.subsystem.api.storage.ObjectInDataStoreStateMachine;
import com.cloud.storage.VMTemplateStoragePoolVO;
import com.cloud.utils.db.GenericDao;
import com.cloud.utils.fsm.StateDao;
import java.util.List;
/**
 * DAO for {@link VMTemplateStoragePoolVO}: tracks which templates are present
 * (or in a given download/object state) on which primary storage pools.
 *
 * <p>Redundant {@code public} modifiers were removed for consistency — interface
 * methods are implicitly public, and the original mixed both styles.
 */
public interface VMTemplatePoolDao extends GenericDao<VMTemplateStoragePoolVO, Long>,
        StateDao<ObjectInDataStoreStateMachine.State, ObjectInDataStoreStateMachine.Event, DataObjectInStore> {

    /** Lists all template mappings stored on the given storage pool. */
    List<VMTemplateStoragePoolVO> listByPoolId(long id);

    /** Lists all pool mappings for the given template. */
    List<VMTemplateStoragePoolVO> listByTemplateId(long templateId);

    /** @return the mapping for the template on the pool, or {@code null} if absent */
    VMTemplateStoragePoolVO findByPoolTemplate(long poolId, long templateId);

    /** Lists mappings for the template in the given download state. */
    List<VMTemplateStoragePoolVO> listByTemplateStatus(long templateId, VMTemplateStoragePoolVO.Status downloadState);

    /** Same as above, restricted to one pool. */
    List<VMTemplateStoragePoolVO> listByTemplateStatus(long templateId, VMTemplateStoragePoolVO.Status downloadState, long poolId);

    /** Same as above, restricted to one data center. */
    List<VMTemplateStoragePoolVO> listByTemplateStatus(long templateId, long datacenterId, VMTemplateStoragePoolVO.Status downloadState);

    /** Same as above, restricted to one pod within a data center. */
    List<VMTemplateStoragePoolVO> listByTemplateStatus(long templateId, long datacenterId, long podId, VMTemplateStoragePoolVO.Status downloadState);

    /** Lists mappings for the template whose state is any of the given states. */
    List<VMTemplateStoragePoolVO> listByTemplateStates(long templateId, VMTemplateStoragePoolVO.Status... states);

    /** @return {@code true} if the template is available on the given pool */
    boolean templateAvailable(long templateId, long poolId);

    /** @return the mapping looked up by host and template id, or {@code null} if absent */
    VMTemplateStoragePoolVO findByHostTemplate(Long hostId, Long templateId);
}
| apache-2.0 |
oscerd/camel | components/camel-hazelcast/src/test/java/org/apache/camel/component/hazelcast/HazelcastMultimapProducerTest.java | 5343 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hazelcast;
import java.util.Arrays;
import java.util.Collection;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.core.MultiMap;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.builder.RouteBuilder;
import org.junit.After;
import org.junit.Test;
import org.mockito.Mock;
import static org.mockito.Mockito.*;
/**
 * Unit tests for the Camel Hazelcast multimap producer. Each test sends a
 * message to a direct endpoint (configured with a specific OPERATION) and
 * verifies the expected call against the mocked Hazelcast {@link MultiMap}.
 */
public class HazelcastMultimapProducerTest extends HazelcastCamelTestSupport {

    @Mock
    private MultiMap<Object, Object> map;

    // Stub the Hazelcast instance so multimap "bar" resolves to the mock.
    @Override
    protected void trainHazelcastInstance(HazelcastInstance hazelcastInstance) {
        when(hazelcastInstance.getMultiMap("bar")).thenReturn(map);
    }

    @Override
    protected void verifyHazelcastInstance(HazelcastInstance hazelcastInstance) {
        verify(hazelcastInstance, atLeastOnce()).getMultiMap("bar");
    }

    // Fails any test that triggers interactions on the mock beyond those verified.
    @After
    public void verifyMapMock() {
        verifyNoMoreInteractions(map);
    }

    // An unknown OPERATION header value must surface as a CamelExecutionException.
    @Test(expected = CamelExecutionException.class)
    public void testWithInvalidOperation() {
        template.sendBodyAndHeader("direct:putInvalid", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
    }

    @Test
    public void testPut() throws InterruptedException {
        template.sendBodyAndHeader("direct:put", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
        verify(map).put("4711", "my-foo");
    }

    @Test
    public void testPutWithOperationName() throws InterruptedException {
        template.sendBodyAndHeader("direct:putWithOperationName", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
        verify(map).put("4711", "my-foo");
    }

    @Test
    public void testPutWithOperationNumber() throws InterruptedException {
        template.sendBodyAndHeader("direct:putWithOperationNumber", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
        verify(map).put("4711", "my-foo");
    }

    @Test
    public void testRemoveValue() {
        template.sendBodyAndHeader("direct:removevalue", "my-foo", HazelcastConstants.OBJECT_ID, "4711");
        verify(map).remove("4711", "my-foo");
    }

    // GET should query the multimap and publish the result on the seda:out endpoint.
    @Test
    public void testGet() {
        when(map.get("4711")).thenReturn(Arrays.<Object>asList("my-foo"));
        template.sendBodyAndHeader("direct:get", null, HazelcastConstants.OBJECT_ID, "4711");
        verify(map).get("4711");
        Collection<?> body = consumer.receiveBody("seda:out", 5000, Collection.class);
        assertTrue(body.contains("my-foo"));
    }

    @Test
    public void testDelete() {
        template.sendBodyAndHeader("direct:delete", null, HazelcastConstants.OBJECT_ID, 4711);
        verify(map).remove(4711);
    }

    @Test
    public void testClear() {
        template.sendBody("direct:clear", "test");
        verify(map).clear();
    }

    // One direct route per operation; the last two routes exercise setting the
    // operation via URI option (by name and by number) instead of a header.
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:putInvalid").setHeader(HazelcastConstants.OPERATION, constant("bogus")).to(String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX));
                from("direct:put").setHeader(HazelcastConstants.OPERATION, constant(HazelcastConstants.PUT_OPERATION)).to(String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX));
                from("direct:removevalue").setHeader(HazelcastConstants.OPERATION, constant(HazelcastConstants.REMOVEVALUE_OPERATION)).to(
                        String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX));
                from("direct:get").setHeader(HazelcastConstants.OPERATION, constant(HazelcastConstants.GET_OPERATION)).to(String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX))
                        .to("seda:out");
                from("direct:delete").setHeader(HazelcastConstants.OPERATION, constant(HazelcastConstants.DELETE_OPERATION)).to(String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX));
                from("direct:clear").setHeader(HazelcastConstants.OPERATION, constant(HazelcastConstants.CLEAR_OPERATION)).to(String.format("hazelcast:%sbar", HazelcastConstants.MULTIMAP_PREFIX));
                from("direct:putWithOperationNumber").toF("hazelcast:%sbar?operation=%s", HazelcastConstants.MULTIMAP_PREFIX, HazelcastConstants.PUT_OPERATION);
                from("direct:putWithOperationName").toF("hazelcast:%sbar?operation=put", HazelcastConstants.MULTIMAP_PREFIX);
            }
        };
    }
}
| apache-2.0 |
s-webber/projog | src/test/java/org/projog/core/kb/KnowledgeBaseServiceLocatorTest.java | 6195 | /*
* Copyright 2013 S. Webber
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.projog.core.kb;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import static org.projog.TestUtils.createKnowledgeBase;
import java.io.Serializable;
import java.util.Map;
import org.junit.Test;
import org.projog.core.kb.KnowledgeBase;
import org.projog.core.kb.KnowledgeBaseServiceLocator;
/**
 * Tests for {@code KnowledgeBaseServiceLocator}: service creation, lookup by
 * reference/implementation type, explicit registration, and error cases.
 *
 * <p>FIX: {@code testGetInstance_InstanceDoesNotImplementReference} was missing
 * its {@code @Test} annotation, so JUnit 4 never executed it.
 */
public class KnowledgeBaseServiceLocatorTest {
   /** Tests one-to-one relationship between KnowledgeBase and KnowledgeBaseServiceLocator instances */
   @Test
   public void testCreation() {
      KnowledgeBase kb1 = createKnowledgeBase();
      KnowledgeBaseServiceLocator sl1 = KnowledgeBaseServiceLocator.getServiceLocator(kb1);
      assertNotNull(sl1);
      assertSame(sl1, KnowledgeBaseServiceLocator.getServiceLocator(kb1));
      KnowledgeBase kb2 = createKnowledgeBase();
      KnowledgeBaseServiceLocator sl2 = KnowledgeBaseServiceLocator.getServiceLocator(kb2);
      assertNotNull(sl2);
      assertNotSame(sl1, sl2);
   }

   @Test
   public void testGetInstance_OneArgument() {
      KnowledgeBaseServiceLocator l = createKnowledgeBaseServiceLocator();
      Object o = l.getInstance(Object.class);
      assertSame(o, l.getInstance(Object.class));
      StringBuilder sb = l.getInstance(StringBuilder.class);
      assertSame(sb, l.getInstance(StringBuilder.class));
      assertNotSame(sb, o);
      assertNotSame(sb, l.getInstance(StringBuffer.class));
   }

   @Test
   public void testGetInstance_TwoArguments() {
      KnowledgeBaseServiceLocator l = createKnowledgeBaseServiceLocator();
      StringBuilder o = l.getInstance(Object.class, StringBuilder.class);
      assertSame(o, l.getInstance(Object.class, StringBuilder.class));
      assertSame(o, l.getInstance(Object.class, StringBuffer.class));
      assertSame(o, l.getInstance(Object.class));
      StringBuilder c = l.getInstance(CharSequence.class, StringBuilder.class);
      assertSame(c, l.getInstance(CharSequence.class, StringBuilder.class));
      assertSame(c, l.getInstance(CharSequence.class, StringBuffer.class));
      assertSame(c, l.getInstance(CharSequence.class));
      assertNotSame(o, c);
      assertNotSame(o, l.getInstance(StringBuilder.class));
      assertNotSame(c, l.getInstance(StringBuilder.class));
   }

   @Test
   public void testGetInstance_Interface() {
      try {
         createKnowledgeBaseServiceLocator().getInstance(Serializable.class);
         fail();
      } catch (RuntimeException e) {
         assertEquals("Could not create new instance of service: interface java.io.Serializable", e.getMessage());
      }
   }

   @Test
   public void testGetInstance_NoValidConstructor() {
      try {
         createKnowledgeBaseServiceLocator().getInstance(Integer.class);
         fail();
      } catch (RuntimeException e) {
         assertEquals("Could not create new instance of service: class java.lang.Integer", e.getMessage());
      }
   }

   @Test
   public void testGetInstance_InstanceDoesNotExtendReference() {
      try {
         createKnowledgeBaseServiceLocator().getInstance(StringBuffer.class, StringBuilder.class);
         fail();
      } catch (IllegalArgumentException e) {
         assertEquals("class java.lang.StringBuilder is not of type: class java.lang.StringBuffer", e.getMessage());
      }
   }

   // FIX: this method previously had no @Test annotation and was silently skipped.
   @Test
   public void testGetInstance_InstanceDoesNotImplementReference() {
      try {
         createKnowledgeBaseServiceLocator().getInstance(Map.class, StringBuilder.class);
         fail();
      } catch (IllegalArgumentException e) {
         assertEquals("class java.lang.StringBuilder is not of type: interface java.util.Map", e.getMessage());
      }
   }

   @Test
   public void testAddInstance() {
      KnowledgeBaseServiceLocator l = createKnowledgeBaseServiceLocator();
      String s = "hello";
      l.addInstance(String.class, s);
      assertSame(s, l.getInstance(String.class));
   }

   @Test
   public void testAddInstance_IllegalStateException() {
      KnowledgeBaseServiceLocator l = createKnowledgeBaseServiceLocator();
      l.addInstance(String.class, "hello");
      try {
         l.addInstance(String.class, "hello");
         fail();
      } catch (IllegalStateException e) {
         assertEquals("Already have a service with key: class java.lang.String", e.getMessage());
      }
   }

   @Test
   public void testAddInstance_IllegalArgumentException() {
      try {
         createKnowledgeBaseServiceLocator().addInstance(StringBuilder.class, "hello");
         fail();
      } catch (IllegalArgumentException e) {
         assertEquals("hello is not of type: class java.lang.StringBuilder", e.getMessage());
      }
   }

   /** Test that the KnowledgeBase gets passed as an argument to the constructor of new services */
   @Test
   public void testClassWithSingleKnowledgeBaseArgumentConstrutor() {
      KnowledgeBase kb = createKnowledgeBase();
      KnowledgeBaseServiceLocator l = KnowledgeBaseServiceLocator.getServiceLocator(kb);
      DummyService s = l.getInstance(DummyService.class);
      assertSame(s, l.getInstance(DummyService.class));
      assertSame(kb, s.kb);
   }

   private KnowledgeBaseServiceLocator createKnowledgeBaseServiceLocator() {
      KnowledgeBase kb = createKnowledgeBase();
      return KnowledgeBaseServiceLocator.getServiceLocator(kb);
   }

   /** Service with a single KnowledgeBase-argument constructor, used by the test above. */
   public static class DummyService {
      private final KnowledgeBase kb;

      public DummyService(KnowledgeBase kb) {
         this.kb = kb;
      }
   }
}
| apache-2.0 |
lixinjie1985/spring-restful | src/main/java/org/eop/spring/restful/exception/handler/ExceptionResponseEntityExceptionHandler.java | 1204 | package org.eop.spring.restful.exception.handler;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestControllerAdvice;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler;
/**
 * Handles exceptions uniformly for REST controllers. This approach works, but
 * it requires extending Spring's {@code ResponseEntityExceptionHandler}, which
 * couples the code to Spring's class hierarchy.
 *
 * @author lixinjie
 * @since 2017-09-12
 */
@Order(Ordered.LOWEST_PRECEDENCE)
@RestControllerAdvice
public class ExceptionResponseEntityExceptionHandler extends ResponseEntityExceptionHandler {

    @Override
    protected ResponseEntity<Object> handleExceptionInternal(Exception ex, Object body, HttpHeaders headers,
            HttpStatus status, WebRequest request) {
        // Trace the exception type and resolved status, then delegate to Spring's
        // default handling. NOTE(review): consider a proper logger over System.out.
        System.out.println("ResponseEntityExceptionHandler:" + ex.getClass());
        System.out.println("ResponseEntityExceptionHandler:" + status);
        return super.handleExceptionInternal(ex, body, headers, status, request);
    }
}
| apache-2.0 |
virjar/sipsoup | src/main/java/com/virjar/sipsoup/exception/NoSuchFunctionException.java | 296 | package com.virjar.sipsoup.exception;
/**
 * Thrown when an XPath expression references a function that is not registered.
 *
 * @author github.com/zhegexiaohuozi [seimimaster@gmail.com] Date: 14-3-16
 */
public class NoSuchFunctionException extends XpathSyntaxErrorException {
    /**
     * @param errorPos character offset in the expression where the unknown
     *                 function reference was found
     * @param msg      description of the failure
     */
    public NoSuchFunctionException(int errorPos, String msg) {
        super(errorPos, msg);
    }
}
| apache-2.0 |
tosanboom/java-sdk | src/main/java/ir/boommarket/cards/TransferDestinationType.java | 126 | package ir.boommarket.cards;
/**
 * Destination type of a card transfer.
 *
 * @author Mona Mohamadinia
 */
public enum TransferDestinationType {
    /** Destination identified by a card number (PAN). */
    PAN,
    /** Destination identified by a deposit (account) number. */
    DEPOSIT
}
googleapis/java-container | proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/ReservationAffinityOrBuilder.java | 3503 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
package com.google.container.v1beta1;
// NOTE(review): protoc-generated code (see file header "DO NOT EDIT") —
// regenerate from google/container/v1beta1/cluster_service.proto rather than
// hand-editing this interface.
public interface ReservationAffinityOrBuilder
    extends
    // @@protoc_insertion_point(interface_extends:google.container.v1beta1.ReservationAffinity)
    com.google.protobuf.MessageOrBuilder {
  /**
   *
   *
   * <pre>
   * Corresponds to the type of reservation consumption.
   * </pre>
   *
   * <code>.google.container.v1beta1.ReservationAffinity.Type consume_reservation_type = 1;</code>
   *
   * @return The enum numeric value on the wire for consumeReservationType.
   */
  int getConsumeReservationTypeValue();
  /**
   *
   *
   * <pre>
   * Corresponds to the type of reservation consumption.
   * </pre>
   *
   * <code>.google.container.v1beta1.ReservationAffinity.Type consume_reservation_type = 1;</code>
   *
   * @return The consumeReservationType.
   */
  com.google.container.v1beta1.ReservationAffinity.Type getConsumeReservationType();
  /**
   *
   *
   * <pre>
   * Corresponds to the label key of a reservation resource. To target a
   * SPECIFIC_RESERVATION by name, specify "googleapis.com/reservation-name" as
   * the key and specify the name of your reservation as its value.
   * </pre>
   *
   * <code>string key = 2;</code>
   *
   * @return The key.
   */
  java.lang.String getKey();
  /**
   *
   *
   * <pre>
   * Corresponds to the label key of a reservation resource. To target a
   * SPECIFIC_RESERVATION by name, specify "googleapis.com/reservation-name" as
   * the key and specify the name of your reservation as its value.
   * </pre>
   *
   * <code>string key = 2;</code>
   *
   * @return The bytes for key.
   */
  com.google.protobuf.ByteString getKeyBytes();
  /**
   *
   *
   * <pre>
   * Corresponds to the label value(s) of reservation resource(s).
   * </pre>
   *
   * <code>repeated string values = 3;</code>
   *
   * @return A list containing the values.
   */
  java.util.List<java.lang.String> getValuesList();
  /**
   *
   *
   * <pre>
   * Corresponds to the label value(s) of reservation resource(s).
   * </pre>
   *
   * <code>repeated string values = 3;</code>
   *
   * @return The count of values.
   */
  int getValuesCount();
  /**
   *
   *
   * <pre>
   * Corresponds to the label value(s) of reservation resource(s).
   * </pre>
   *
   * <code>repeated string values = 3;</code>
   *
   * @param index The index of the element to return.
   * @return The values at the given index.
   */
  java.lang.String getValues(int index);
  /**
   *
   *
   * <pre>
   * Corresponds to the label value(s) of reservation resource(s).
   * </pre>
   *
   * <code>repeated string values = 3;</code>
   *
   * @param index The index of the value to return.
   * @return The bytes of the values at the given index.
   */
  com.google.protobuf.ByteString getValuesBytes(int index);
}
| apache-2.0 |
gsteelman/utd | cs6301-probabilistic-graphical-models/homework3/src/pgm/EvidenceHashMap.java | 1647 | package pgm;
import java.util.HashMap;
import java.util.Map;
/**
 * Evidence collection backed by a {@link HashMap} from variable id to the
 * observed (evidence) value assigned to that variable.
 */
public class EvidenceHashMap implements EvidenceCollectionType {

    // variable id -> assigned evidence value
    HashMap<Integer, Integer> evid = new HashMap<Integer, Integer>();

    /** Renders as {@code "e: [size] id:value id:value ..."}. */
    @Override
    public String toString() {
        StringBuilder s = new StringBuilder();
        s.append("e: [" + size() + "] ");
        for (Map.Entry<Integer, Integer> e : evid.entrySet()) {
            s.append(e.getKey() + ":" + e.getValue() + " ");
        }
        return s.toString();
    }

    /** @return the evidence value for the variable, or {@code null} if unobserved */
    @Override
    public Integer get(VariableType v) {
        return get(v.id());
    }

    @Override
    public Integer get(Integer vID) {
        return evid.get(vID);
    }

    /** Sets (or replaces) the evidence for the variable; always succeeds. */
    @Override
    public boolean set(VariableType v, Integer e) {
        return set(v.id(), e);
    }

    @Override
    public boolean set(Integer vID, Integer e) {
        // put() already replaces an existing mapping; the previous remove() call
        // before it was redundant.
        evid.put(vID, e);
        return true;
    }

    /** Adds evidence only if the variable is not already observed. */
    @Override
    public boolean add(VariableType v, Integer e) {
        return add(v.id(), e);
    }

    @Override
    public boolean add(Integer vID, Integer e) {
        if (evid.containsKey(vID)) {
            return false;
        }
        evid.put(vID, e);
        return true;
    }

    /** Removes the evidence for the variable, if any. */
    @Override
    public boolean remove(VariableType v) {
        return remove(v.id());
    }

    /** @return {@code true} if a non-null evidence value existed and was removed */
    @Override
    public boolean remove(Integer vID) {
        return evid.remove(vID) != null;
    }

    @Override
    public int size() {
        return evid.size();
    }

    @Override
    public boolean isEmpty() {
        return evid.isEmpty();
    }

    @Override
    public boolean contains(VariableType v) {
        return contains(v.id());
    }

    @Override
    public boolean contains(Integer vID) {
        return evid.containsKey(vID);
    }
}
| apache-2.0 |
PRImA-Research-Lab/prima-core-libs | java/PrimaBasic/src/org/primaresearch/shared/variable/StringValue.java | 1511 | /*
* Copyright 2019 PRImA Research Lab, University of Salford, United Kingdom
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.primaresearch.shared.variable;
/**
* Value object for Variable holding a string.
*
* @author Christian Clausner
*
*/
/**
 * Value object for Variable holding a string.
 *
 * <p>FIX: {@link #equals(Object)} previously compared the wrapped strings with
 * {@code ==} (reference identity), so two StringValue objects holding equal but
 * distinct String instances were reported unequal. It now uses value equality,
 * and a consistent {@link #hashCode()} has been added.
 */
public class StringValue extends VariableValue {
    private static final long serialVersionUID = 1L;

    // The wrapped string value; may be null when set explicitly by callers.
    public String val;

    /**
     * Constructor with initialisation to empty string.
     */
    public StringValue() {
        this.val = "";
    }

    /**
     * Constructor with custom initialisation.
     */
    public StringValue(String val) {
        this.val = val;
    }

    @Override
    public String toString() {
        return val != null ? val : "";
    }

    @Override
    public String getType() {
        return "String";
    }

    /**
     * Value-based equality on the wrapped string. Equality with a plain
     * {@link String} is kept for backward compatibility, although it is
     * asymmetric (a String never reports equality with a StringValue).
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == null)
            return false;
        if (obj instanceof StringValue) {
            String other = ((StringValue) obj).val;
            return this.val == null ? other == null : this.val.equals(other);
        }
        if (obj instanceof String)
            return ((String) obj).equals(this.val);
        return false;
    }

    /** Consistent with {@link #equals(Object)}: based on the wrapped string. */
    @Override
    public int hashCode() {
        return val != null ? val.hashCode() : 0;
    }
}
| apache-2.0 |
daileyet/webscheduler | src/main/java/com/openthinks/webscheduler/help/trigger/ITriggerGenerator.java | 421 | package com.openthinks.webscheduler.help.trigger;
import com.openthinks.easyweb.context.handler.WebAttributers;
import com.openthinks.webscheduler.model.task.ITaskTrigger;
/**
 * Builds an {@link ITaskTrigger} from the attributes of a web request.
 *
 * @author dailey.yet@outlook.com
 */
public interface ITriggerGenerator {
    /**
     * Creates a task trigger from the attributes carried by the current request.
     *
     * @param was the request/response attribute wrapper to read trigger settings from
     * @return the trigger built from those settings
     */
    public ITaskTrigger generate(WebAttributers was);
}
rh-messaging/jboss-activemq-artemis | artemis-jdbc-store/src/main/java/org/apache/activemq/artemis/jdbc/store/file/JDBCSequentialFileFactory.java | 8369 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jdbc.store.file;
import javax.sql.DataSource;
import java.io.File;
import java.nio.ByteBuffer;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import org.apache.activemq.artemis.core.io.IOCriticalErrorListener;
import org.apache.activemq.artemis.core.io.SequentialFile;
import org.apache.activemq.artemis.core.io.SequentialFileFactory;
import org.apache.activemq.artemis.core.io.nio.NIOSequentialFileFactory;
import org.apache.activemq.artemis.core.server.ActiveMQComponent;
import org.apache.activemq.artemis.jdbc.store.sql.SQLProvider;
import org.apache.activemq.artemis.journal.ActiveMQJournalLogger;
import org.apache.activemq.artemis.utils.ConcurrentHashSet;
import org.jboss.logging.Logger;
public class JDBCSequentialFileFactory implements SequentialFileFactory, ActiveMQComponent {
private static final Logger logger = Logger.getLogger(JDBCSequentialFile.class);
private boolean started;
private final Set<JDBCSequentialFile> files = new ConcurrentHashSet<>();
private final Executor executor;
private final Map<String, Object> fileLocks = new ConcurrentHashMap<>();
private JDBCSequentialFileFactoryDriver dbDriver;
private final IOCriticalErrorListener criticalErrorListener;
/**
 * Creates a factory backed by the given {@link DataSource}.
 *
 * <p>Driver-creation failures are routed to the critical-error listener rather
 * than rethrown, so the factory object is always constructed (possibly with a
 * null driver).
 */
public JDBCSequentialFileFactory(final DataSource dataSource,
                                 final SQLProvider sqlProvider,
                                 Executor executor,
                                 IOCriticalErrorListener criticalErrorListener) throws Exception {
    this.executor = executor;
    this.criticalErrorListener = criticalErrorListener;
    try {
        this.dbDriver = JDBCFileUtils.getDBFileDriver(dataSource, sqlProvider);
    } catch (SQLException e) {
        criticalErrorListener.onIOException(e, "Failed to start JDBC Driver", null);
    }
}
public JDBCSequentialFileFactory(final String connectionUrl,
final String className,
final SQLProvider sqlProvider,
Executor executor,
IOCriticalErrorListener criticalErrorListener) throws Exception {
this.executor = executor;
this.criticalErrorListener = criticalErrorListener;
try {
this.dbDriver = JDBCFileUtils.getDBFileDriver(className, connectionUrl, sqlProvider);
} catch (SQLException e) {
criticalErrorListener.onIOException(e, "Failed to start JDBC Driver", null);
}
}
public JDBCSequentialFileFactory(final Connection connection,
final SQLProvider sqlProvider,
final Executor executor,
final IOCriticalErrorListener criticalErrorListener) throws Exception {
this.executor = executor;
this.criticalErrorListener = criticalErrorListener;
try {
this.dbDriver = JDBCFileUtils.getDBFileDriver(connection, sqlProvider);
} catch (SQLException e) {
criticalErrorListener.onIOException(e, "Failed to start JDBC Driver", null);
}
}
public JDBCSequentialFileFactoryDriver getDbDriver() {
return dbDriver;
}
/**
* @see Connection#setNetworkTimeout(Executor, int)
**/
public JDBCSequentialFileFactory setNetworkTimeout(Executor executor, int milliseconds) {
this.dbDriver.setNetworkTimeout(executor, milliseconds);
return this;
}
@Override
public SequentialFileFactory setDatasync(boolean enabled) {
return this;
}
@Override
public boolean isDatasync() {
return false;
}
@Override
public synchronized void start() {
try {
if (!started) {
dbDriver.start();
started = true;
}
} catch (Exception e) {
criticalErrorListener.onIOException(e, "Unable to start database driver", null);
started = false;
}
}
@Override
public synchronized void stop() {
try {
dbDriver.stop();
} catch (SQLException e) {
ActiveMQJournalLogger.LOGGER.error("Error stopping file factory, unable to close db connection");
}
started = false;
}
@Override
public SequentialFile createSequentialFile(String fileName) {
try {
fileLocks.putIfAbsent(fileName, new Object());
JDBCSequentialFile file = new JDBCSequentialFile(this, fileName, executor, dbDriver, fileLocks.get(fileName));
files.add(file);
return file;
} catch (Exception e) {
criticalErrorListener.onIOException(e, "Error whilst creating JDBC file", null);
}
return null;
}
public void sequentialFileClosed(SequentialFile file) {
files.remove(file);
}
public int getNumberOfOpenFiles() {
return files.size();
}
@Override
public int getMaxIO() {
return 1;
}
@Override
public List<String> listFiles(String extension) throws Exception {
try {
return dbDriver.listFiles(extension);
} catch (SQLException e) {
criticalErrorListener.onIOException(e, "Error listing JDBC files.", null);
throw e;
}
}
@Override
public boolean isSupportsCallbacks() {
return true;
}
@Override
public void onIOError(Exception exception, String message, SequentialFile file) {
criticalErrorListener.onIOException(exception, message, file);
}
@Override
public ByteBuffer allocateDirectBuffer(final int size) {
return NIOSequentialFileFactory.allocateDirectByteBuffer(size);
}
@Override
public void releaseDirectBuffer(ByteBuffer buffer) {
// nothing we can do on this case. we can just have good faith on GC
}
@Override
public ByteBuffer newBuffer(final int size) {
return ByteBuffer.allocate(size);
}
@Override
public void clearBuffer(final ByteBuffer buffer) {
final int limit = buffer.limit();
buffer.rewind();
for (int i = 0; i < limit; i++) {
buffer.put((byte) 0);
}
buffer.rewind();
}
@Override
public ByteBuffer wrapBuffer(final byte[] bytes) {
return ByteBuffer.wrap(bytes);
}
@Override
public int getAlignment() {
return 1;
}
@Override
public JDBCSequentialFileFactory setAlignment(int alignment) {
// no op
return this;
}
@Override
public int calculateBlockSize(final int bytes) {
return bytes;
}
@Override
public void deactivateBuffer() {
}
@Override
public void releaseBuffer(final ByteBuffer buffer) {
}
@Override
public void activateBuffer(SequentialFile file) {
}
@Override
public File getDirectory() {
return null;
}
@Override
public boolean isStarted() {
return started;
}
@Override
public void createDirs() throws Exception {
}
@Override
public void flush() {
for (SequentialFile file : files) {
try {
file.sync();
} catch (Exception e) {
criticalErrorListener.onIOException(e, "Error during JDBC file sync.", file);
}
}
}
public synchronized void destroy() throws SQLException {
try {
dbDriver.destroy();
} catch (SQLException e) {
logger.error("Error destroying file factory", e);
}
}
}
| apache-2.0 |
lenovoDTC/dubbo-G | dubbo-common/src/test/java/com/alibaba/dubbo/common/extensionloader/ext6_wrap/impl/Ext5Wrapper1.java | 1219 | /*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.common.extensionloader.ext6_wrap.impl;
import java.util.concurrent.atomic.AtomicInteger;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.common.extensionloader.ext6_wrap.WrappedExt;
/**
* @author ding.lid
*/
public class Ext5Wrapper1 implements WrappedExt {
WrappedExt instance;
public static AtomicInteger echoCount = new AtomicInteger();
public Ext5Wrapper1(WrappedExt instance) {
this.instance = instance;
}
public String echo(URL url, String s) {
echoCount.incrementAndGet();
return instance.echo(url, s);
}
} | apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/transform/TrafficPolicyAlreadyExistsExceptionUnmarshaller.java | 1650 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model.transform;
import org.w3c.dom.Node;
import javax.annotation.Generated;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.transform.StandardErrorUnmarshaller;
import com.amazonaws.services.route53.model.TrafficPolicyAlreadyExistsException;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TrafficPolicyAlreadyExistsExceptionUnmarshaller extends StandardErrorUnmarshaller {

    public TrafficPolicyAlreadyExistsExceptionUnmarshaller() {
        super(TrafficPolicyAlreadyExistsException.class);
    }

    /**
     * Unmarshalls the error node into a {@code TrafficPolicyAlreadyExistsException},
     * or returns {@code null} when the error code is not the one this unmarshaller
     * understands (so the next unmarshaller in the chain can try).
     */
    @Override
    public AmazonServiceException unmarshall(Node node) throws Exception {
        // Null-safe check: bail out unless this is exactly our error code.
        String errorCode = parseErrorCode(node);
        if (!"TrafficPolicyAlreadyExists".equals(errorCode)) {
            return null;
        }
        return (TrafficPolicyAlreadyExistsException) super.unmarshall(node);
    }
}
| apache-2.0 |
dagnir/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/transform/DeleteSnapshotResultStaxUnmarshaller.java | 2215 | /*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* DeleteSnapshotResult StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteSnapshotResultStaxUnmarshaller implements Unmarshaller<DeleteSnapshotResult, StaxUnmarshallerContext> {

    /**
     * Unmarshalls a {@code DeleteSnapshotResult} from the StAX event stream.
     * The result carries no data members, so this simply consumes events until
     * the enclosing element (or the document) ends.
     */
    public DeleteSnapshotResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        DeleteSnapshotResult deleteSnapshotResult = new DeleteSnapshotResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;

        if (context.isStartOfDocument())
            targetDepth += 1;

        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument())
                return deleteSnapshotResult;

            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                // No members to unmarshall for this result type.
            } else if (xmlEvent.isEndElement()) {
                if (context.getCurrentDepth() < originalDepth) {
                    return deleteSnapshotResult;
                }
            }
        }
    }

    // BUGFIX: the original lazily initialized this field without synchronization,
    // a data race when getInstance() is called concurrently. The unmarshaller is
    // stateless, so eager initialization is safe and race-free.
    private static final DeleteSnapshotResultStaxUnmarshaller INSTANCE = new DeleteSnapshotResultStaxUnmarshaller();

    public static DeleteSnapshotResultStaxUnmarshaller getInstance() {
        return INSTANCE;
    }
}
| apache-2.0 |
johnjohndoe/apps-android-commons | commons/src/main/java/org/wikimedia/commons/contributions/MediaListAdapter.java | 1964 | package fr.free.nrw.commons.contributions;
import android.app.Activity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import com.android.volley.toolbox.ImageLoader;
import fr.free.nrw.commons.CommonsApplication;
import fr.free.nrw.commons.Media;
import fr.free.nrw.commons.R;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
public class MediaListAdapter extends BaseAdapter {
private ArrayList<Media> mediaList;
private Activity activity;
public MediaListAdapter(Activity activity, ArrayList<Media> mediaList) {
this.mediaList = mediaList;
this.activity = activity;
}
public void updateMediaList(ArrayList<Media> newMediaList) {
// FIXME: Hack for now, replace with something more efficient later on
for(Media newMedia: newMediaList) {
boolean isDuplicate = false;
for(Media oldMedia: mediaList ) {
if(newMedia.getFilename().equals(oldMedia.getFilename())) {
isDuplicate = true;
break;
}
}
if(!isDuplicate) {
mediaList.add(0, newMedia);
}
}
}
public int getCount() {
return mediaList.size();
}
public Object getItem(int i) {
return mediaList.get(i);
}
public long getItemId(int i) {
return i;
}
public View getView(int i, View view, ViewGroup viewGroup) {
if(view == null) {
view = activity.getLayoutInflater().inflate(R.layout.layout_contribution, null, false);
view.setTag(new ContributionViewHolder(view));
}
Media m = (Media) getItem(i);
ContributionViewHolder holder = (ContributionViewHolder) view.getTag();
holder.imageView.setMedia(m, ((CommonsApplication)activity.getApplicationContext()).getImageLoader());
holder.titleView.setText(m.getDisplayTitle());
return view;
}
}
| apache-2.0 |
ethan0w/simple-java-blog | src/main/java/com/github/ethan0w/sjblog/BlogConstants.java | 159 | package com.github.ethan0w.sjblog;
/** Blog-wide constants. Not instantiable. */
public class BlogConstants {
    /** Number of articles rendered on each listing page. */
    public static final int ARTICLE_COUNT_PER_PAGE = 5;

    /** Number of entries shown in the "recent posts" list. */
    public static final int RECENT_POST_NUM = 5;

    private BlogConstants() {
        // constants holder — prevent instantiation
    }
}
mydevotion/elastic-job-source | elastic-job-cloud/elastic-job-cloud-scheduler/src/test/java/com/dangdang/ddframe/job/cloud/scheduler/boot/MasterBootstrapTest.java | 2771 | /*
* Copyright 1999-2015 dangdang.com.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package com.dangdang.ddframe.job.cloud.scheduler.boot;
import com.dangdang.ddframe.job.cloud.scheduler.boot.env.BootstrapEnvironment;
import com.dangdang.ddframe.job.cloud.scheduler.boot.env.BootstrapEnvironment.EnvironmentArgument;
import com.dangdang.ddframe.job.cloud.scheduler.fixture.EmbedTestingServer;
import com.dangdang.ddframe.job.reg.base.CoordinatorRegistryCenter;
import com.dangdang.ddframe.job.reg.zookeeper.ZookeeperElectionService;
import org.apache.curator.framework.CuratorFramework;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.unitils.util.ReflectionUtils;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import static org.mockito.Mockito.verify;
/**
 * Unit test for the cloud scheduler's MasterBootstrap: verifies that start()
 * begins leader election and blocks on the latch, and stop() relinquishes
 * leadership. Collaborators are Mockito mocks injected by reflection.
 */
@RunWith(MockitoJUnitRunner.class)
public class MasterBootstrapTest {
    @Mock
    private CuratorFramework client;
    @Mock
    private CoordinatorRegistryCenter regCenter;
    @Mock
    private ZookeeperElectionService electionService;
    @Mock
    private CountDownLatch latch;
    private MasterBootstrap masterBootstrap;
    @Before
    public void init() throws NoSuchFieldException {
        // Spin up the embedded ZooKeeper the bootstrap connects to, and point
        // the singleton environment at it before constructing the bootstrap.
        EmbedTestingServer.start();
        Properties properties = new Properties();
        properties.setProperty(EnvironmentArgument.ZOOKEEPER_SERVERS.getKey(), "localhost:3181");
        ReflectionUtils.setFieldValue(BootstrapEnvironment.getInstance(), "properties", properties);
        masterBootstrap = new MasterBootstrap();
        // Replace the real collaborators created by the constructor with mocks.
        // NOTE: the string field names must match MasterBootstrap's declarations.
        ReflectionUtils.setFieldValue(masterBootstrap, "regCenter", regCenter);
        ReflectionUtils.setFieldValue(masterBootstrap, "electionService", electionService);
        ReflectionUtils.setFieldValue(masterBootstrap, "latch", latch);
    }
    @Test
    public void assertStart() throws InterruptedException {
        masterBootstrap.start();
        // start() must kick off leader election and then wait on the latch.
        verify(electionService).startLeadership();
        verify(latch).await();
    }
    @Test
    public void assertStop() {
        masterBootstrap.stop();
        verify(electionService).stopLeadership();
    }
}
| apache-2.0 |
kangaroo-server/kangaroo | kangaroo-server-authz/src/test/java/net/krotscheck/kangaroo/authz/admin/v1/resource/RoleServiceSearchTest.java | 8740 | /*
* Copyright (c) 2017 Michael Krotscheck
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package net.krotscheck.kangaroo.authz.admin.v1.resource;
import net.krotscheck.kangaroo.authz.admin.Scope;
import net.krotscheck.kangaroo.authz.common.database.entity.AbstractAuthzEntity;
import net.krotscheck.kangaroo.authz.common.database.entity.Application;
import net.krotscheck.kangaroo.authz.common.database.entity.ClientType;
import net.krotscheck.kangaroo.authz.common.database.entity.OAuthToken;
import net.krotscheck.kangaroo.authz.common.database.entity.Role;
import net.krotscheck.kangaroo.authz.common.database.entity.User;
import net.krotscheck.kangaroo.authz.oauth2.exception.RFC6749.InvalidScopeException;
import net.krotscheck.kangaroo.common.hibernate.id.IdUtil;
import net.krotscheck.kangaroo.common.hibernate.id.MalformedIdException;
import net.krotscheck.kangaroo.common.response.ListResponseEntity;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import javax.ws.rs.core.GenericType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriBuilder;
import java.net.URI;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import static org.junit.Assert.assertTrue;
/**
* Test the search endpoint of the scope service.
*
* @author Michael Krotscheck
*/
@RunWith(Parameterized.class)
public final class RoleServiceSearchTest
extends AbstractServiceSearchTest<Role> {
/**
* Convenience generic type for response decoding.
*/
private static final GenericType<ListResponseEntity<Role>> LIST_TYPE =
new GenericType<ListResponseEntity<Role>>() {
};
/**
* Create a new instance of this parameterized test.
*
* @param clientType The type of client.
* @param tokenScope The client scope to issue.
* @param createUser Whether to create a new user.
*/
public RoleServiceSearchTest(final ClientType clientType,
final String tokenScope,
final Boolean createUser) {
super(Role.class, clientType, tokenScope, createUser);
}
/**
* Test parameters.
*
* @return The parameters passed to this test during every run.
*/
@Parameterized.Parameters
public static Collection parameters() {
return Arrays.asList(
new Object[]{
ClientType.Implicit,
Scope.ROLE_ADMIN,
false
},
new Object[]{
ClientType.Implicit,
Scope.ROLE,
false
},
new Object[]{
ClientType.Implicit,
Scope.ROLE_ADMIN,
true
},
new Object[]{
ClientType.Implicit,
Scope.ROLE,
true
},
new Object[]{
ClientType.ClientCredentials,
Scope.ROLE_ADMIN,
false
},
new Object[]{
ClientType.ClientCredentials,
Scope.ROLE,
false
});
}
/**
* Return the appropriate list type for this test suite.
*
* @return The list type, used for test decoding.
*/
@Override
protected GenericType<ListResponseEntity<Role>> getListType() {
return LIST_TYPE;
}
/**
* Return the list of entities which are owned by the given user.
* Includes scope checks.
*
* @param owner The owner of the entities.
* @return A list of entities (could be empty).
*/
@Override
protected List<Role> getOwnedEntities(final User owner) {
// Get all the owned clients.
return getAttached(owner).getApplications()
.stream()
.flatMap(a -> a.getRoles().stream())
.collect(Collectors.toList());
}
/**
* Return the list of field names on which this particular entity type
* has build a search index.
*
* @return An array of field names.
*/
protected String[] getSearchIndexFields() {
return new String[]{"name"};
}
/**
* Return the token scope required for admin access on this test.
*
* @return The correct scope string.
*/
@Override
protected String getAdminScope() {
return Scope.ROLE_ADMIN;
}
/**
* Return the token scope required for generic user access.
*
* @return The correct scope string.
*/
@Override
protected String getRegularScope() {
return Scope.ROLE;
}
/**
* Construct the request URL for this test given a specific resource ID.
*
* @param id The ID to use.
* @return The resource URL.
*/
@Override
protected URI getUrlForId(final String id) {
return UriBuilder.fromPath("/v1/role/")
.path(id)
.build();
}
/**
* Construct the request URL for this test given a specific resource ID.
*
* @param entity The entity to use.
* @return The resource URL.
*/
@Override
protected URI getUrlForEntity(final AbstractAuthzEntity entity) {
return getUrlForId(IdUtil.toString(entity.getId()));
}
/**
* Test that we can filter a search by an application ID.
*/
@Test
public void testSearchByApplication() {
String query = "many";
Application a = getSecondaryContext()
.getApplication();
OAuthToken token = getAdminToken();
Map<String, String> params = new HashMap<>();
params.put("q", query);
params.put("application", IdUtil.toString(a.getId()));
Response r = search(params, token);
// Determine result set.
List<Role> searchResults = getSearchResults(query);
List<Role> accessibleEntities = getAccessibleEntities(token);
List<Role> expectedResults = searchResults
.stream()
.filter((item) -> accessibleEntities.indexOf(item) > -1)
.filter((item) -> item.getApplication().equals(a))
.collect(Collectors.toList());
Integer expectedTotal = expectedResults.size();
int expectedResultSize = Math.min(10, expectedTotal);
Integer expectedOffset = 0;
Integer expectedLimit = 10;
if (isLimitedByClientCredentials()) {
assertErrorResponse(r, new InvalidScopeException());
} else if (!isAccessible(a, token)) {
assertErrorResponse(r, Status.BAD_REQUEST);
} else {
assertTrue(expectedTotal > 1);
assertListResponse(r,
expectedResultSize,
expectedOffset,
expectedLimit,
expectedTotal);
}
}
/**
* Test that an invalid application throws an error.
*/
@Test
public void testSearchByInvalidApplication() {
OAuthToken token = getAdminToken();
Map<String, String> params = new HashMap<>();
params.put("q", "many");
params.put("application", IdUtil.toString(IdUtil.next()));
Response r = search(params, token);
if (isLimitedByClientCredentials()) {
assertErrorResponse(r, new InvalidScopeException());
} else {
assertErrorResponse(r, new MalformedIdException());
}
}
/**
* Test that an malformed application throws an error.
*/
@Test
public void testSearchByMalformedApplication() {
Map<String, String> params = new HashMap<>();
params.put("q", "many");
params.put("application", "malformed");
Response r = search(params, getAdminToken());
assertErrorResponse(r, new MalformedIdException());
}
}
| apache-2.0 |
equella/Equella | Source/Plugins/Core/com.equella.core/src/com/tle/core/notification/migrate/MigrateNotifications.java | 4776 | /*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0, (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tle.core.notification.migrate;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
import com.tle.beans.Institution;
import com.tle.beans.item.ItemId;
import com.tle.core.guice.Bind;
import com.tle.core.hibernate.impl.HibernateMigrationHelper;
import com.tle.core.hibernate.impl.TablesOnlyFilter;
import com.tle.core.migration.AbstractHibernateSchemaMigration;
import com.tle.core.migration.MigrationInfo;
import com.tle.core.migration.MigrationResult;
import com.tle.core.plugins.impl.PluginServiceImpl;
import java.util.Date;
import java.util.List;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import org.hibernate.Query;
import org.hibernate.ScrollableResults;
import org.hibernate.annotations.AccessType;
import org.hibernate.annotations.Index;
import org.hibernate.classic.Session;
@Bind
@SuppressWarnings("nls")
public class MigrateNotifications extends AbstractHibernateSchemaMigration {
private static final String KEY_PREFIX =
PluginServiceImpl.getMyPluginId(MigrateNotifications.class) + ".";
@Override
protected int countDataMigrations(HibernateMigrationHelper helper, Session session) {
return count(session, "from ItemUsersNotified");
}
@Override
protected void executeDataMigration(
HibernateMigrationHelper helper, MigrationResult result, Session session) {
Date convertDate = new Date();
Query query =
session.createSQLQuery(
"SELECT i.uuid, i.version, i.institution_id,"
+ " un.element FROM item_users_notified un INNER JOIN item i ON i.id = un.item_id");
ScrollableResults results = query.scroll();
while (results.next()) {
Object[] oldnote = results.get();
ItemId itemId = new ItemId((String) oldnote[0], ((Number) oldnote[1]).intValue());
Institution inst = new Institution();
inst.setDatabaseId(((Number) oldnote[2]).longValue());
FakeNotification notification = new FakeNotification();
notification.reason = FakeNotification.REASON_WENTLIVE;
notification.date = convertDate;
notification.itemid = itemId.toString();
notification.institution = inst;
notification.userTo = (String) oldnote[3];
session.save(notification);
session.flush();
session.clear();
}
}
@Override
protected List<String> getAddSql(HibernateMigrationHelper helper) {
return helper.getCreationSql(new TablesOnlyFilter("notification"));
}
@Override
protected Class<?>[] getDomainClasses() {
return new Class<?>[] {FakeNotification.class, Institution.class, FakeItemUsersNotified.class};
}
@Override
protected List<String> getDropModifySql(HibernateMigrationHelper helper) {
return helper.getDropTableSql("item_users_notified");
}
@Override
public MigrationInfo createMigrationInfo() {
return new MigrationInfo(KEY_PREFIX + "migratenotifications.title");
}
@Entity(name = "ItemUsersNotified")
public static class FakeItemUsersNotified {
@Id long id;
}
@Entity(name = "Notification")
@AccessType("field")
public static class FakeNotification {
public static final String REASON_WENTLIVE = "wentlive";
@Id
@GeneratedValue(strategy = GenerationType.AUTO)
long id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(nullable = false)
@Index(name = "inst_idx")
@XStreamOmitField
Institution institution;
@Column(length = 255, nullable = true)
@Index(name = "itemid_idx")
String itemid;
@Column(length = 8, nullable = false)
String reason;
@Column(length = 255, nullable = false)
@Index(name = "userto_idx")
String userTo;
@Column(nullable = false)
Date date;
@Column(length = 255)
String userFrom;
}
}
| apache-2.0 |
ijlhjj/demos | src/main/java/com/sweetmanor/demo/gui/swing/advanced/JTextPaneDemo.java | 1852 | package com.sweetmanor.demo.gui.swing.advanced;
import java.awt.Color;
import java.awt.EventQueue;
import javax.swing.JFrame;
import javax.swing.JTextPane;
import javax.swing.text.SimpleAttributeSet;
import javax.swing.text.StyleConstants;
import javax.swing.text.StyledDocument;
import com.sweetmanor.utils.FrameUtil;
/**
* JTextPane示例:格式化文档
*
* @version 1.0 2014-08-26
* @author ijlhjj
*/
public class JTextPaneDemo {
	JFrame frame = new JFrame("JTextPane示例");

	/** Builds the frame: a read-only JTextPane with three differently styled lines. */
	public void init() {
		JTextPane txtPanel = new JTextPane();
		frame.add(txtPanel);
		txtPanel.setEditable(false);
		txtPanel.setText("疯狂Android讲义\n" + "疯狂Java讲义\n" + "轻量级Java EE企业应用实战\n");
		StyledDocument style = txtPanel.getStyledDocument(); // styled document model
		// Line 1: red, 24pt, Dialog, underlined.
		SimpleAttributeSet android = new SimpleAttributeSet();
		StyleConstants.setForeground(android, Color.RED);
		StyleConstants.setFontSize(android, 24);
		StyleConstants.setFontFamily(android, "Dialog");
		StyleConstants.setUnderline(android, true);
		style.setCharacterAttributes(0, 12, android, true); // applies to the first 12 characters
		// Line 2: blue, 30pt, Arial Black, bold.
		SimpleAttributeSet java = new SimpleAttributeSet();
		StyleConstants.setForeground(java, Color.BLUE);
		StyleConstants.setFontSize(java, 30);
		StyleConstants.setFontFamily(java, "Arial Black");
		StyleConstants.setBold(java, true);
		style.setCharacterAttributes(12, 8, java, true);
		// Line 3: green, 40pt, italic.
		SimpleAttributeSet javaee = new SimpleAttributeSet();
		StyleConstants.setForeground(javaee, Color.GREEN);
		StyleConstants.setFontSize(javaee, 40);
		StyleConstants.setItalic(javaee, true);
		style.setCharacterAttributes(21, 10, javaee, true);
		frame.setSize(600, 400);
		FrameUtil.center(frame);
		frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
		frame.setVisible(true);
	}

	public static void main(String[] args) {
		// BUGFIX: Swing components must be created and shown on the Event
		// Dispatch Thread; the original built the UI on the main thread.
		EventQueue.invokeLater(new Runnable() {
			@Override
			public void run() {
				new JTextPaneDemo().init();
			}
		});
	}
}
| apache-2.0 |
ContaAzul/redisson | redisson/src/main/java/org/redisson/RedissonSetMultimapValues.java | 24058 | /**
* Copyright 2016 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.redisson.api.RFuture;
import org.redisson.api.RSet;
import org.redisson.api.SortOrder;
import org.redisson.api.mapreduce.RCollectionMapReduce;
import org.redisson.client.codec.Codec;
import org.redisson.client.codec.ScanCodec;
import org.redisson.client.protocol.RedisCommand;
import org.redisson.client.protocol.RedisCommand.ValueType;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.client.protocol.convertor.BooleanReplayConvertor;
import org.redisson.client.protocol.convertor.IntegerReplayConvertor;
import org.redisson.client.protocol.decoder.ListScanResult;
import org.redisson.client.protocol.decoder.ListScanResultReplayDecoder;
import org.redisson.client.protocol.decoder.NestedMultiDecoder;
import org.redisson.client.protocol.decoder.ObjectListReplayDecoder;
import org.redisson.client.protocol.decoder.ObjectSetReplayDecoder;
import org.redisson.client.protocol.decoder.ScanObjectEntry;
import org.redisson.command.CommandAsyncExecutor;
/**
* Set based Multimap Cache values holder
*
* @author Nikita Koksharov
*
* @param <V> value
*/
public class RedissonSetMultimapValues<V> extends RedissonExpirable implements RSet<V> {
private static final RedisCommand<ListScanResult<Object>> EVAL_SSCAN = new RedisCommand<ListScanResult<Object>>("EVAL", new NestedMultiDecoder(new ObjectListReplayDecoder<Object>(), new ListScanResultReplayDecoder()), 7, ValueType.MAP_KEY, ValueType.OBJECT);
private static final RedisCommand<Integer> EVAL_SIZE = new RedisCommand<Integer>("EVAL", new IntegerReplayConvertor(), 6, ValueType.MAP_KEY);
private static final RedisCommand<Set<Object>> EVAL_READALL = new RedisCommand<Set<Object>>("EVAL", new ObjectSetReplayDecoder<Object>(), 6, ValueType.MAP_KEY);
private static final RedisCommand<Boolean> EVAL_CONTAINS_VALUE = new RedisCommand<Boolean>("EVAL", new BooleanReplayConvertor(), 6, Arrays.asList(ValueType.MAP_KEY, ValueType.MAP_VALUE));
private static final RedisCommand<Boolean> EVAL_CONTAINS_ALL_WITH_VALUES = new RedisCommand<Boolean>("EVAL", new BooleanReplayConvertor(), 7, ValueType.OBJECTS);
private final RSet<V> set;
private final Object key;
private final String timeoutSetName;
    /**
     * @param codec codec used to encode/decode values
     * @param commandExecutor Redis command executor
     * @param name Redis key of the backing set holding this multimap entry's values
     * @param timeoutSetName Redis key of the ZSET whose scores are consulted as
     *        per-key expiration timestamps by this view's Lua scripts
     * @param key the multimap key whose values this set represents
     */
    public RedissonSetMultimapValues(Codec codec, CommandAsyncExecutor commandExecutor, String name, String timeoutSetName, Object key) {
        super(codec, commandExecutor, name);
        this.timeoutSetName = timeoutSetName;
        this.key = key;
        // Plain RedissonSet delegate over the same key for non-expiry-aware operations.
        this.set = new RedissonSet<V>(codec, commandExecutor, name, null);
    }
    @Override
    public int size() {
        // Synchronous wrapper over the expiry-aware sizeAsync() script.
        return get(sizeAsync());
    }
    @Override
    public <KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce() {
        // NOTE(review): returns null rather than throwing
        // UnsupportedOperationException like the other unsupported operations on
        // this view — callers must null-check; confirm this is intentional.
        return null;
    }
    // Expiration and rename operations are not supported on this values view;
    // each override below throws UnsupportedOperationException.
    @Override
    public RFuture<Boolean> clearExpireAsync() {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
    @Override
    public RFuture<Boolean> expireAsync(long timeToLive, TimeUnit timeUnit) {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
    @Override
    public RFuture<Boolean> expireAtAsync(long timestamp) {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
    @Override
    public RFuture<Long> remainTimeToLiveAsync() {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
    @Override
    public RFuture<Void> renameAsync(String newName) {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
    @Override
    public RFuture<Boolean> renamenxAsync(String newName) {
        throw new UnsupportedOperationException("This operation is not supported for SetMultimap values Set");
    }
// Deletes this value-set and its expiration entry atomically.
// KEYS[1] = timeout zset, KEYS[2] = this value-set.
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key.
// The large literal is a "never expires" sentinel used when the key has no
// score in the timeout zset. If the entry is already expired, nothing is
// removed and 0 is returned; otherwise the key is removed from the timeout
// zset and, if it was present, the value set is deleted too.
public RFuture<Boolean> deleteAsync() {
return commandExecutor.evalWriteAsync(getName(), codec, RedisCommands.EVAL_BOOLEAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
"local res = redis.call('zrem', KEYS[1], ARGV[2]); " +
"if res > 0 then " +
"redis.call('del', KEYS[2]); " +
"end; " +
"return res; ",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), key);
}
@Override
// Returns the cardinality of the value-set, or 0 if the multimap entry's
// expiration timestamp (score in KEYS[1]) is already in the past.
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key.
public RFuture<Integer> sizeAsync() {
return commandExecutor.evalReadAsync(getName(), codec, EVAL_SIZE,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; "
+ "return redis.call('scard', KEYS[2]);",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), key);
}
@Override
public boolean isEmpty() {
// An expired entry reports size 0 and is therefore considered empty.
return size() == 0;
}
@Override
public boolean contains(Object o) {
// Blocking wrapper over containsAsync.
return get(containsAsync(o));
}
@Override
// Membership test honoring entry expiration: returns false (0) without
// touching the set when the entry has expired, otherwise SISMEMBER.
// ARGV[1] = now (ms), ARGV[2] = encoded key, ARGV[3] = encoded value.
public RFuture<Boolean> containsAsync(Object o) {
return commandExecutor.evalReadAsync(getName(), codec, EVAL_CONTAINS_VALUE,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; "
+ "return redis.call('sismember', KEYS[2], ARGV[3]);",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), key, o);
}
// One SSCAN page for the iterator. Returns {0, {}} — cursor 0, no elements,
// i.e. "scan finished" — when the multimap entry has expired.
// ARGV[1] = now (ms), ARGV[2] = scan cursor, ARGV[3] = encoded key.
// 'client' pins the scan to the node that produced the previous cursor.
private ListScanResult<ScanObjectEntry> scanIterator(InetSocketAddress client, long startPos) {
RFuture<ListScanResult<ScanObjectEntry>> f = commandExecutor.evalReadAsync(client, getName(), new ScanCodec(codec), EVAL_SSCAN,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[3]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return {0, {}};"
+ "end;"
+ "return redis.call('sscan', KEYS[2], ARGV[2]);",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), startPos, key);
return get(f);
}
@Override
// Cursor-based iterator backed by SSCAN; element removal goes through
// remove(Object) so the expiration check is applied.
public Iterator<V> iterator() {
return new RedissonBaseIterator<V>() {
@Override
ListScanResult<ScanObjectEntry> iterator(InetSocketAddress client, long nextIterPos) {
return scanIterator(client, nextIterPos);
}
@Override
void remove(V value) {
RedissonSetMultimapValues.this.remove(value);
}
};
}
@Override
// Snapshot of all members, or an empty table when the entry has expired.
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key.
public RFuture<Set<V>> readAllAsync() {
return commandExecutor.evalReadAsync(getName(), codec, EVAL_READALL,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return {};"
+ "end; "
+ "return redis.call('smembers', KEYS[2]);",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), key);
}
@Override
public Set<V> readAll() {
// Blocking wrapper over readAllAsync.
return get(readAllAsync());
}
@Override
public Object[] toArray() {
    // Snapshot the whole value-set and convert it. The previous unchecked
    // cast to Set<Object> was redundant: Set<V>.toArray() already returns
    // Object[].
    return get(readAllAsync()).toArray();
}
@Override
public <T> T[] toArray(T[] a) {
    // Standard Collection.toArray(T[]) contract: fills 'a' when it is large
    // enough, otherwise allocates a new array of the same runtime type.
    // No cast needed here either.
    return get(readAllAsync()).toArray(a);
}
@Override
public boolean add(V e) {
// Delegates to the plain RSet view; adding does NOT consult the timeout
// zset — presumably the owning multimap maintains the expiry entry.
return set.add(e);
}
@Override
public RFuture<Boolean> addAsync(V e) {
return set.addAsync(e);
}
@Override
public V removeRandom() {
// Delegates to the plain set view (no expiration check).
return set.removeRandom();
}
@Override
public RFuture<V> removeRandomAsync() {
// NOTE(review): issues SPOP directly instead of delegating to 'set' like
// removeRandom() does — asymmetric; confirm intended.
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SPOP_SINGLE, getName());
}
@Override
public Set<V> removeRandom(int amount) {
return get(removeRandomAsync(amount));
}
@Override
public RFuture<Set<V>> removeRandomAsync(int amount) {
// SPOP with a count; bypasses the expiration check.
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SPOP, getName(), amount);
}
@Override
public V random() {
return get(randomAsync());
}
@Override
public RFuture<V> randomAsync() {
// NOTE(review): SRANDMEMBER is a read-only command but is sent via
// writeAsync (routes to master) — confirm this routing is intentional.
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SRANDMEMBER_SINGLE, getName());
}
@Override
// Removes one value, honoring entry expiration: returns 0 (false) if the
// entry has expired, otherwise 1 iff SREM actually removed the member.
// ARGV[1] = now (ms), ARGV[2] = encoded key, ARGV[3] = encoded value.
public RFuture<Boolean> removeAsync(Object o) {
return commandExecutor.evalWriteAsync(getName(), codec, EVAL_CONTAINS_VALUE,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; "
+ "return redis.call('srem', KEYS[2], ARGV[3]) > 0 and 1 or 0;",
Arrays.<Object>asList(timeoutSetName, getName()), System.currentTimeMillis(), key, o);
}
@Override
public boolean remove(Object value) {
    // Blocking wrapper over removeAsync. The previous unchecked (V) cast was
    // pointless: removeAsync takes Object and erasure made the cast a no-op.
    return get(removeAsync(value));
}
@Override
public RFuture<Boolean> moveAsync(String destination, V member) {
// Plain SMOVE into another set; does not consult the timeout zset.
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SMOVE, getName(), destination, member);
}
@Override
public boolean move(String destination, V member) {
// Blocking wrapper over moveAsync.
return get(moveAsync(destination, member));
}
@Override
public boolean containsAll(Collection<?> c) {
// Blocking wrapper over containsAllAsync.
return get(containsAllAsync(c));
}
@Override
// Bulk membership test honoring expiration. Script args after the key list:
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key, ARGV[3..] = values.
// The script deletes each ARGV entry found among the members and returns 1
// iff only the two leading args (timestamp + key) remain, i.e. every value
// was present.
public RFuture<Boolean> containsAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
try {
// The key must be encoded with the map-key encoder to match script ARGV[2].
byte[] keyState = codec.getMapKeyEncoder().encode(key);
args.add(System.currentTimeMillis());
args.add(keyState);
args.addAll(c);
} catch (IOException e) {
throw new RuntimeException(e);
}
return commandExecutor.evalReadAsync(getName(), codec, EVAL_CONTAINS_ALL_WITH_VALUES,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
"local s = redis.call('smembers', KEYS[2]);" +
// NOTE(review): the inner loop starts at j = 2, so the encoded key bytes
// (ARGV[2]) also participate in the member comparison — looks benign but
// confirm against upstream behavior.
"for i = 1, #s, 1 do " +
"for j = 2, #ARGV, 1 do "
+ "if ARGV[j] == s[i] "
+ "then table.remove(ARGV, j) end "
+ "end; "
+ "end;"
+ "return #ARGV == 2 and 1 or 0; ",
Arrays.<Object>asList(timeoutSetName, getName()), args.toArray());
}
@Override
public boolean addAll(Collection<? extends V> values) {
    // Nothing to send for an empty collection — SADD requires at least one
    // member, and the result would be "no change" anyway.
    return !values.isEmpty() && get(addAllAsync(values));
}
@Override
public RFuture<Boolean> addAllAsync(Collection<? extends V> values) {
    // Build the SADD argument vector: the set name followed by each element.
    // No expiration check here; additions go straight to the value set.
    List<Object> params = new ArrayList<Object>(values.size() + 1);
    params.add(getName());
    for (V value : values) {
        params.add(value);
    }
    return commandExecutor.writeAsync(getName(), codec, RedisCommands.SADD_BOOL, params.toArray());
}
@Override
public boolean retainAll(Collection<?> c) {
// Blocking wrapper over retainAllAsync.
return get(retainAllAsync(c));
}
@Override
// Keeps only the given values (Set.retainAll semantics) while honoring
// entry expiration. Returns 1 iff at least one member was removed.
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key, ARGV[3..] = values.
public RFuture<Boolean> retainAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
try {
byte[] keyState = codec.getMapKeyEncoder().encode(key);
args.add(System.currentTimeMillis());
args.add(keyState);
args.addAll(c);
} catch (IOException e) {
throw new RuntimeException(e);
}
return commandExecutor.evalWriteAsync(getName(), codec, EVAL_CONTAINS_ALL_WITH_VALUES,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
// NOTE(review): the retained-set check loops from j = 2, which includes
// the encoded key bytes — a member equal to those bytes would be kept;
// confirm this matches upstream intent.
"local changed = 0 " +
"local s = redis.call('smembers', KEYS[2]) "
+ "local i = 1 "
+ "while i <= #s do "
+ "local element = s[i] "
+ "local isInAgrs = false "
+ "for j = 2, #ARGV, 1 do "
+ "if ARGV[j] == element then "
+ "isInAgrs = true "
+ "break "
+ "end "
+ "end "
+ "if isInAgrs == false then "
+ "redis.call('SREM', KEYS[2], element) "
+ "changed = 1 "
+ "end "
+ "i = i + 1 "
+ "end "
+ "return changed ",
Arrays.<Object>asList(timeoutSetName, getName()), args.toArray());
}
@Override
// Removes every given value, honoring entry expiration. Returns 1 iff at
// least one SREM removed a member.
// ARGV[1] = now (ms), ARGV[2] = encoded multimap key, ARGV[3..] = values.
public RFuture<Boolean> removeAllAsync(Collection<?> c) {
List<Object> args = new ArrayList<Object>(c.size() + 2);
try {
byte[] keyState = codec.getMapKeyEncoder().encode(key);
args.add(System.currentTimeMillis());
args.add(keyState);
args.addAll(c);
} catch (IOException e) {
throw new RuntimeException(e);
}
return commandExecutor.evalWriteAsync(getName(), codec, EVAL_CONTAINS_ALL_WITH_VALUES,
"local expireDate = 92233720368547758; " +
"local expireDateScore = redis.call('zscore', KEYS[1], ARGV[2]); "
+ "if expireDateScore ~= false then "
+ "expireDate = tonumber(expireDateScore) "
+ "end; "
+ "if expireDate <= tonumber(ARGV[1]) then "
+ "return 0;"
+ "end; " +
// NOTE(review): the loop starts at i = 2, so SREM is also issued for the
// encoded key bytes (ARGV[2]) — normally never a member; confirm harmless.
"local v = 0 " +
"for i = 2, #ARGV, 1 do "
+ "if redis.call('srem', KEYS[2], ARGV[i]) == 1 "
+ "then v = 1 end "
+"end "
+ "return v ",
Arrays.<Object>asList(timeoutSetName, getName()), args.toArray());
}
@Override
public boolean removeAll(Collection<?> c) {
// Blocking wrapper over removeAllAsync.
return get(removeAllAsync(c));
}
// ---------------------------------------------------------------------------
// Set-algebra operations (union / diff / intersection). These operate on raw
// Redis set names via SUNIONSTORE / SDIFFSTORE / SINTERSTORE and their read
// variants, WITHOUT the multimap-entry expiration check applied elsewhere.
// NOTE(review): the read* variants send read-only commands (SUNION, SDIFF,
// SINTER) via writeAsync, i.e. to the master — confirm routing is intended.
// ---------------------------------------------------------------------------
@Override
public int union(String... names) {
return get(unionAsync(names));
}
@Override
public RFuture<Integer> unionAsync(String... names) {
// SUNIONSTORE into this set; returns the resulting cardinality.
List<Object> args = new ArrayList<Object>(names.length + 1);
args.add(getName());
args.addAll(Arrays.asList(names));
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SUNIONSTORE_INT, args.toArray());
}
@Override
public Set<V> readUnion(String... names) {
return get(readUnionAsync(names));
}
@Override
public RFuture<Set<V>> readUnionAsync(String... names) {
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SUNION, args.toArray());
}
@Override
public void clear() {
// Delegates to delete(), which DOES honor the timeout zset (see deleteAsync).
delete();
}
@Override
public int diff(String... names) {
return get(diffAsync(names));
}
@Override
public RFuture<Integer> diffAsync(String... names) {
// SDIFFSTORE into this set; returns the resulting cardinality.
List<Object> args = new ArrayList<Object>(names.length + 1);
args.add(getName());
args.addAll(Arrays.asList(names));
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SDIFFSTORE_INT, args.toArray());
}
@Override
public Set<V> readDiff(String... names) {
return get(readDiffAsync(names));
}
@Override
public RFuture<Set<V>> readDiffAsync(String... names) {
List<Object> args = new ArrayList<Object>(names.length + 1);
args.add(getName());
args.addAll(Arrays.asList(names));
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SDIFF, args.toArray());
}
@Override
public int intersection(String... names) {
return get(intersectionAsync(names));
}
@Override
public RFuture<Integer> intersectionAsync(String... names) {
// SINTERSTORE into this set; returns the resulting cardinality.
List<Object> args = new ArrayList<Object>(names.length + 1);
args.add(getName());
args.addAll(Arrays.asList(names));
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SINTERSTORE_INT, args.toArray());
}
@Override
public Set<V> readIntersection(String... names) {
return get(readIntersectionAsync(names));
}
@Override
public RFuture<Set<V>> readIntersectionAsync(String... names) {
List<Object> args = new ArrayList<Object>(names.length + 1);
args.add(getName());
args.addAll(Arrays.asList(names));
return commandExecutor.writeAsync(getName(), codec, RedisCommands.SINTER, args.toArray());
}
// ---------------------------------------------------------------------------
// SORT-related operations. All of them delegate verbatim to the plain RSet
// view over the same Redis key; none apply the multimap-entry expiration
// check used by size/contains/readAll above.
// NOTE(review): confirm that bypassing the expiration check here is intended.
// ---------------------------------------------------------------------------
public RFuture<Set<V>> readSortAsync(SortOrder order) {
return set.readSortAsync(order);
}
public Set<V> readSort(SortOrder order) {
return set.readSort(order);
}
public RFuture<Set<V>> readSortAsync(SortOrder order, int offset, int count) {
return set.readSortAsync(order, offset, count);
}
public Set<V> readSort(SortOrder order, int offset, int count) {
return set.readSort(order, offset, count);
}
public Set<V> readSort(String byPattern, SortOrder order) {
return set.readSort(byPattern, order);
}
public RFuture<Set<V>> readSortAsync(String byPattern, SortOrder order) {
return set.readSortAsync(byPattern, order);
}
public Set<V> readSort(String byPattern, SortOrder order, int offset, int count) {
return set.readSort(byPattern, order, offset, count);
}
public RFuture<Set<V>> readSortAsync(String byPattern, SortOrder order, int offset, int count) {
return set.readSortAsync(byPattern, order, offset, count);
}
// Variants with GET patterns return projected values, hence the free type T.
public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order) {
return set.readSort(byPattern, getPatterns, order);
}
public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order) {
return set.readSortAsync(byPattern, getPatterns, order);
}
public <T> Collection<T> readSort(String byPattern, List<String> getPatterns, SortOrder order, int offset,
int count) {
return set.readSort(byPattern, getPatterns, order, offset, count);
}
public <T> RFuture<Collection<T>> readSortAsync(String byPattern, List<String> getPatterns, SortOrder order,
int offset, int count) {
return set.readSortAsync(byPattern, getPatterns, order, offset, count);
}
// sortTo* variants store the sorted result into destName (SORT ... STORE).
public int sortTo(String destName, SortOrder order) {
return set.sortTo(destName, order);
}
public RFuture<Integer> sortToAsync(String destName, SortOrder order) {
return set.sortToAsync(destName, order);
}
public int sortTo(String destName, SortOrder order, int offset, int count) {
return set.sortTo(destName, order, offset, count);
}
public RFuture<Integer> sortToAsync(String destName, SortOrder order, int offset, int count) {
return set.sortToAsync(destName, order, offset, count);
}
public int sortTo(String destName, String byPattern, SortOrder order) {
return set.sortTo(destName, byPattern, order);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order) {
return set.sortToAsync(destName, byPattern, order);
}
public int sortTo(String destName, String byPattern, SortOrder order, int offset, int count) {
return set.sortTo(destName, byPattern, order, offset, count);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, SortOrder order, int offset, int count) {
return set.sortToAsync(destName, byPattern, order, offset, count);
}
public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order) {
return set.sortTo(destName, byPattern, getPatterns, order);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns, SortOrder order) {
return set.sortToAsync(destName, byPattern, getPatterns, order);
}
public int sortTo(String destName, String byPattern, List<String> getPatterns, SortOrder order, int offset,
int count) {
return set.sortTo(destName, byPattern, getPatterns, order, offset, count);
}
public RFuture<Integer> sortToAsync(String destName, String byPattern, List<String> getPatterns, SortOrder order,
int offset, int count) {
return set.sortToAsync(destName, byPattern, getPatterns, order, offset, count);
}
}
| apache-2.0 |
Mack-Browson/ud2-elementos-programa | 16_factorial/src/com/company/Main.java | 715 | package com.company;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
public class Main {
    /*
    Nombre : Miguel Bruno
    Apellido : Obiang Mitogo Nchama
    */

    /**
     * Reads an integer from stdin and prints its factorial.
     *
     * @throws IOException if reading from stdin fails
     */
    public static void main(String[] args) throws IOException {
        // Un programa que calcule el factorial de un numero
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        System.out.print(" Escriba un numero : ");
        int numero = Integer.parseInt(br.readLine());
        if (numero < 0) {
            // Bug fix: the original "while (numero != 0)" loop never terminated
            // for negative input; factorial is undefined there, so report it.
            System.out.println("factorial : indefinido para numeros negativos");
            return;
        }
        System.out.println("factorial : " + factorial(numero));
    }

    /**
     * Iterative factorial. Uses long so results are exact up to 20!;
     * the previous int accumulator silently overflowed from 13! onward.
     */
    private static long factorial(int n) {
        long result = 1L;
        for (int i = 2; i <= n; i++) {
            result *= i;
        }
        return result;
    }
}
| apache-2.0 |
zaradai/obsidian | java/core/src/main/java/com/zaradai/distributor/messaging/netty/handler/HandshakeCompletionEvent.java | 1200 | /**
* Copyright 2014 Zaradai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.zaradai.distributor.messaging.netty.handler;
import com.google.common.base.Preconditions;
public class HandshakeCompletionEvent {
    /** Shared instance representing a handshake that finished without error. */
    public static final HandshakeCompletionEvent SUCCESS = new HandshakeCompletionEvent();

    // Null exactly when the handshake succeeded; otherwise the failure reason.
    private final Throwable cause;

    // Private: success is modeled solely by the shared SUCCESS instance.
    private HandshakeCompletionEvent() {
        this.cause = null;
    }

    /** Failure event; {@code cause} must be non-null. */
    public HandshakeCompletionEvent(Throwable cause) {
        this.cause = Preconditions.checkNotNull(cause, "Invalid cause");
    }

    /** True when no failure cause was recorded. */
    public boolean isSuccess() {
        return this.cause == null;
    }

    /** The failure reason, or null for {@link #SUCCESS}. */
    public Throwable getCause() {
        return this.cause;
    }
}
| apache-2.0 |
vvvpic/xxtime | app/src/main/java/net/xxtime/activity/WeixinActivity.java | 4463 | package net.xxtime.activity;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import com.alibaba.fastjson.JSONObject;
import com.longtu.base.util.StringUtils;
import com.longtu.base.util.ToastUtils;
import com.loopj.android.http.RequestParams;
import net.xxtime.R;
import net.xxtime.base.activity.BaseActivity;
import net.xxtime.bean.CommonBean;
import net.xxtime.bean.StudentAccountBean;
import net.xxtime.utils.SharedUtils;
/**
 * WeChat (Weixin) account binding screen.
 * NOTE(review): the original comment said "Alipay binding" and several
 * field/method names (etAlipay, setAlipay) suggest this was copy-pasted from
 * an Alipay screen; account type 2 is sent to the server here — confirm
 * type 2 means WeChat.
 */
public class WeixinActivity extends BaseActivity {
// Input fields: account id ("etAlipay" despite the name) and account holder name.
private EditText etAlipay, etName;
private Button btnOk;
// Parsed response of the "getAccount" request.
private StudentAccountBean studentAccountBean;
// Parsed response of the "bindingAccount" request.
private CommonBean commonBean;
private Message msg;
// NOTE(review): anonymous non-static Handler retains the Activity and can
// leak it if a message arrives after finish(); consider a static handler
// with a WeakReference.
private Handler handler=new Handler(){
@Override
public void handleMessage(Message msg) {
switch (msg.what){
case 1:
// getAccount result: bflag "1" appears to mean success — confirm.
studentAccountBean= JSONObject.parseObject(msg.obj.toString(),StudentAccountBean.class);
if (studentAccountBean!=null&&studentAccountBean.getBflag().equals("1")){
setAlipay();
}
break;
case 2:
// bindingAccount result: close the screen on success, always toast msg.
commonBean=JSONObject.parseObject(msg.obj.toString(),CommonBean.class);
if (commonBean!=null&&commonBean.getBflag().equals("1")){
finish();
}
ToastUtils.show(WeixinActivity.this,commonBean.getMsg());
break;
}
}
};
// Pre-fills the form from the LAST entry of the default account list;
// switches the button label to "modify" when an account id already exists.
private void setAlipay(){
if (studentAccountBean.getDefaultAList()!=null){
int p=studentAccountBean.getDefaultAList().size()-1;
if (!StringUtils.isEmpty(studentAccountBean.getDefaultAList().get(p).getAccountname())){
etName.setText(studentAccountBean.getDefaultAList().get(p).getAccountname());
}
if (!StringUtils.isEmpty(studentAccountBean.getDefaultAList().get(p).getAccountid())){
etAlipay.setText(studentAccountBean.getDefaultAList().get(p).getAccountid());
btnOk.setText("修改");
}
}
}
@Override
public void setContentView() {
setContentView(R.layout.activity_weixin);
}
@Override
public void initViews() {
etAlipay =(EditText)findViewById(R.id.etAlipay);
etName =(EditText)findViewById(R.id.etName);
btnOk =(Button) findViewById(R.id.btnOk);
}
@Override
public void initDatas() {
// Fetch any existing bound account on entry; type=2 selects WeChat.
params=new RequestParams();
params.put("reqCode","getAccount");
params.put("userid", SharedUtils.getUserId(this));
params.put("type", 2);
Log.e("param==>",params.toString());
post("studentAccount",params,"getAccount");
}
@Override
public void setDatas() {
setTitle("绑定微信");
}
@Override
public void setListener() {
btnOk.setOnClickListener(this);
}
@Override
public void ResumeDatas() {
}
@Override
public void onClick(View v) {
switch (v.getId()){
case R.id.btnOk:
// Validate both inputs, then submit the binding request.
if (StringUtils.isEmpty(etAlipay.getText().toString())){
ToastUtils.show(this,"请输入微信号");
return;
}
if (StringUtils.isEmpty(etName.getText().toString())){
ToastUtils.show(this,"请输入微信姓名");
return;
}
params=new RequestParams();
params.put("reqCode","bindingAccount");
params.put("userid", SharedUtils.getUserId(this));
params.put("type", 2);
params.put("accountid", etAlipay.getText().toString());
params.put("accountname", etName.getText().toString());
Log.e("param==>",params.toString());
post("studentAccount",params,"bindingAccount");
break;
}
}
@Override
// Network callback (likely off the UI thread — hence the Handler hop);
// tags 1/2 distinguish getAccount vs bindingAccount responses.
public void OnReceive(String requestname, String response) {
msg=new Message();
if (requestname.equals("getAccount")){
msg.what=1;
}else if (requestname.equals("bindingAccount")){
msg.what=2;
}
msg.obj=response;
handler.sendMessage(msg);
}
}
| apache-2.0 |
pdalbora/gosu-lang | gosu-core-api/src/main/java/gw/lang/reflect/TypeSystem.java | 30869 | /*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.lang.reflect;
import gw.config.CommonServices;
import gw.fs.IFile;
import gw.fs.IResource;
import gw.internal.gosu.parser.TypeSystemState;
import gw.lang.UnstableAPI;
import gw.lang.parser.GosuParserFactory;
import gw.lang.parser.GosuParserTypes;
import gw.lang.parser.IParserPart;
import gw.lang.parser.ISymbolTable;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.TypeVarToTypeMap;
import gw.lang.parser.exceptions.ParseException;
import gw.lang.parser.exceptions.ParseResultsException;
import gw.lang.parser.expressions.ITypeLiteralExpression;
import gw.lang.reflect.gs.IGenericTypeVariable;
import gw.lang.reflect.gs.IGosuArrayClass;
import gw.lang.reflect.gs.IGosuClass;
import gw.lang.reflect.gs.IGosuClassLoader;
import gw.lang.reflect.java.IJavaArrayType;
import gw.lang.reflect.java.IJavaBackedType;
import gw.lang.reflect.java.IJavaClassInfo;
import gw.lang.reflect.java.IJavaType;
import gw.lang.reflect.java.JavaTypes;
import gw.lang.reflect.module.IExecutionEnvironment;
import gw.lang.reflect.module.IModule;
import gw.lang.reflect.module.IProject;
import gw.util.IFeatureFilter;
import gw.util.perf.InvocationCounter;
import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.List;
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
@UnstableAPI
public class TypeSystem
{
// Single JVM-wide lock guarding type-system mutation; exposed via
// lock()/unlock()/getGlobalLock() below.
private static final Lock GLOBAL_LOCK = new ReentrantLock();
// Diagnostic invocation counters (disabled by the 'false' ctor arg);
// public + mutable, presumably poked by instrumentation — TODO confirm.
public static InvocationCounter tyeRequestCounter = new InvocationCounter(false);
public static InvocationCounter tyeLoadingCounter = new InvocationCounter(false);
/**
 * Gets the intrinsic type for a given class.<p>
 * <p/>
 * <b>Note:</b> you should use this method only if you do not have an
 * Object of class <code>javaClass</code> to get the type from. If you
 * do have such an object, use {@link #getFromObject} instead.
 *
 * @param javaClass the Class to convert to an intrinsic type
 *
 * @return the IType that corresponds to that class
 *
 * @see #getFromObject(Object)
 */
public static IType get( Class javaClass )
{
return CommonServices.getTypeSystem().get( javaClass );
}
// Same as get(Class), resolved against an explicit module. The module is
// pushed for the duration of the lookup and always popped in finally.
public static IType get(Class javaClass, IModule module) {
TypeSystem.pushModule(module);
try {
return CommonServices.getTypeSystem().get( javaClass );
} finally {
TypeSystem.popModule(module);
}
}
// Resolves the IType for class metadata (no loaded Class required).
public static IType get(IJavaClassInfo javaClassInfo) {
return CommonServices.getTypeSystem().get(javaClassInfo);
}
// Module-scoped variant of get(IJavaClassInfo); see get(Class, IModule).
public static IType get(IJavaClassInfo classInfo, IModule module) {
TypeSystem.pushModule(module);
try {
return CommonServices.getTypeSystem().get(classInfo);
} finally {
TypeSystem.popModule(module);
}
}
/**
 * Returns the intrinsic type for the given Object.
 *
 * @param object the object to get an IType for
 *
 * @return the IType for the object
 *
 * @see #get(Class)
 */
public static IType getFromObject( Object object )
{
return CommonServices.getTypeSystem().getFromObject(object);
}
// Module-scoped variant; pushes 'module' for the lookup, pops in finally.
public static IType getFromObject( Object object, IModule module)
{
pushModule(module);
try {
return CommonServices.getTypeSystem().getFromObject(object);
} finally {
popModule(module);
}
}
// Resolves a type by relative name with no type-uses context.
// @throws ClassNotFoundException if no type matches the name
public static IType getByRelativeName( String relativeName ) throws ClassNotFoundException
{
return CommonServices.getTypeSystem().getByRelativeName(relativeName);
}
/**
 * Gets an intrinsic type based on a relative name. This could either be the name of an entity,
 * like "User", the name of a typekey, like "SystemPermission", or a class name, like
 * "java.lang.String" (relative and fully qualified class names are the same as far as this factory
 * is concerned). Names can have [] appended to them to create arrays, and multi-dimensional arrays
 * are supported.
 *
 * @param relativeName the relative name of the type
 * @param typeUses the map of used types to use when resolving
 *
 * @return the corresponding IType
 *
 * @throws ClassNotFoundException if the specified name doesn't correspond to any type
 */
public static IType getByRelativeName( String relativeName, ITypeUsesMap typeUses ) throws ClassNotFoundException
{
return CommonServices.getTypeSystem().getByRelativeName(relativeName, typeUses);
}
/**
 * Gets an intrinsic type based on a fully-qualified name. This could either be the name of an entity,
 * like "entity.User", the name of a typekey, like "typekey.SystemPermission", or a class name, like
 * "java.lang.String". Names can have [] appended to them to create arrays, and multi-dimensional arrays
 * are supported.
 *
 * @param fullyQualifiedName the fully qualified name of the type
 *
 * @return the corresponding IType
 *
 * @throws RuntimeException if the specified name doesn't correspond to any type
 */
public static IType getByFullName( String fullyQualifiedName )
{
return CommonServices.getTypeSystem().getByFullName(fullyQualifiedName);
}
// Module-scoped variant; pushes 'module' for the lookup, pops in finally.
public static IType getByFullName( String fullyQualifiedName, IModule module )
{
TypeSystem.pushModule(module);
try {
return CommonServices.getTypeSystem().getByFullName(fullyQualifiedName);
} finally {
TypeSystem.popModule(module);
}
}
/** @deprecated call getByFullName( String, IModule ) */
// Resolves the module by name (null => JRE module) then delegates above.
// Throws RuntimeException when the named module does not exist.
public static IType getByFullName( String fullyQualifiedName, String moduleName )
{
IModule module = moduleName == null ? getExecutionEnvironment().getJreModule() : getExecutionEnvironment().getModule(moduleName);
if (module == null) {
throw new RuntimeException("Could not find module with name " + moduleName + " for " + fullyQualifiedName);
}
return getByFullName(fullyQualifiedName, module);
}
// Convenience push/pop of the execution environment's global module.
// NOTE(review): not try/finally-balanced here — callers must pair them.
public static void pushGlobalModule() {
TypeSystem.pushModule(getExecutionEnvironment().getGlobalModule());
}
public static void popGlobalModule() {
TypeSystem.popModule(getExecutionEnvironment().getGlobalModule());
}
/**
 * Gets a type based on a fully-qualified name. This could either be the name of an entity,
 * like "entity.User", the name of a typekey, like "typekey.SystemPermission", or a class name, like
 * "java.lang.String". Names can have [] appended to them to create arrays, and multi-dimensional arrays
 * are supported.
 *
 * This method behaves the same as getByFullName execept instead of throwing it returns null.
 *
 * @param fullyQualifiedName the fully qualified name of the type
 *
 * @return the corresponding IType or null if the type does not exist
 */
public static IType getByFullNameIfValid( String fullyQualifiedName )
{
return CommonServices.getTypeSystem().getByFullNameIfValid(fullyQualifiedName);
}
// Same as above but skips the Java type loader — presumably to resolve only
// non-Java (e.g. Gosu) types; TODO confirm the exact loader set excluded.
public static IType getByFullNameIfValidNoJava( String fullyQualifiedName )
{
return CommonServices.getTypeSystem().getByFullNameIfValidNoJava(fullyQualifiedName);
}
// Module-scoped variant; pushes 'module' for the lookup, pops in finally.
public static IType getByFullNameIfValid(String typeName, IModule module) {
TypeSystem.pushModule(module);
try {
return getByFullNameIfValid(typeName);
} finally {
TypeSystem.popModule(module);
}
}
// Drops cached error types so subsequent resolution retries from scratch.
public static void clearErrorTypes()
{
CommonServices.getTypeSystem().clearErrorTypes();
}
// Checksum that changes when the type system is refreshed; usable as a cheap
// "has anything changed" probe by caches.
public static int getRefreshChecksum()
{
return CommonServices.getTypeSystem().getRefreshChecksum();
}
// Checksum variant for single-type refreshes — TODO confirm exact semantics.
public static int getSingleRefreshChecksum()
{
return CommonServices.getTypeSystem().getSingleRefreshChecksum();
}
/**
 * Converts a String name of a type into an IType.
 *
 * @param typeString the type name to parse
 * @return the parsed type
 * @throws IllegalArgumentException if the type string doesn't correspond to any known IType
 */
public static IType parseType( String typeString ) throws IllegalArgumentException
{
return CommonServices.getTypeSystem().parseType(typeString);
}
// Variant resolving relative names against a type-uses map.
public static IType parseType( String typeString, ITypeUsesMap typeUsesMap ) throws IllegalArgumentException
{
return CommonServices.getTypeSystem().parseType( typeString, typeUsesMap );
}
// Variant substituting type variables via the supplied map.
public static IType parseType( String typeString, TypeVarToTypeMap actualParamByVarName ) throws IllegalArgumentException
{
return CommonServices.getTypeSystem().parseType(typeString, actualParamByVarName);
}
// Variant combining type-variable substitution and a type-uses map.
public static IType parseType( String typeString, TypeVarToTypeMap actualParamByVarName, ITypeUsesMap typeUsesMap ) throws IllegalArgumentException
{
return CommonServices.getTypeSystem().parseType(typeString, actualParamByVarName, typeUsesMap);
}
// Parses to a type-literal expression (full parse tree) rather than an IType;
// surfaces parse failures as ParseResultsException.
public static ITypeLiteralExpression parseTypeExpression( String typeString, TypeVarToTypeMap actualParamByVarName, ITypeUsesMap typeUsesMap ) throws ParseResultsException
{
return CommonServices.getTypeSystem().parseTypeExpression(typeString, actualParamByVarName, typeUsesMap);
}
/**
 * Acquires the global type-system lock
 */
public static void lock()
{
GLOBAL_LOCK.lock();
}
/**
 * Releases the global type-system lock
 */
public static void unlock()
{
GLOBAL_LOCK.unlock();
}
// Exposes the underlying lock for callers needing tryLock/conditions.
public static Lock getGlobalLock()
{
return GLOBAL_LOCK;
}
// Component type of an array-like type (delegates to the type service).
public static IType getComponentType( IType valueType )
{
return CommonServices.getTypeSystem().getComponentType(valueType);
}
// Resolves a namespace (package-like) type by its fully-qualified name.
public static INamespaceType getNamespace( String strFqNamespace )
{
return CommonServices.getTypeSystem().getNamespace(strFqNamespace);
}
// Module-scoped variant; pushes 'module' for the lookup, pops in finally.
public static INamespaceType getNamespace(String strType, IModule module) {
TypeSystem.pushModule(module);
try {
return getNamespace(strType);
} finally {
TypeSystem.popModule(module);
}
}
/**
 * Returns all type names in the system for all type loaders.
 * @return all type names in the system.
 */
public static Set<? extends CharSequence> getAllTypeNames()
{
return CommonServices.getTypeSystem().getAllTypeNames();
}
// Interns/creates a type-variable type bounded by 'boundingType' and scoped
// to 'enclosingType'.
public static ITypeVariableType getOrCreateTypeVariableType( String strName, IType boundingType, IType enclosingType )
{
return CommonServices.getTypeSystem().getOrCreateTypeVariableType(strName, boundingType, enclosingType);
}
// Function type derived from a method's signature.
public static IFunctionType getOrCreateFunctionType( IMethodInfo mi )
{
return CommonServices.getTypeSystem().getOrCreateFunctionType(mi);
}
// Function type built explicitly from name, return type, and parameter types.
public static IFunctionType getOrCreateFunctionType( String strFunctionName, IType retType, IType[] paramTypes )
{
return CommonServices.getTypeSystem().getOrCreateFunctionType( strFunctionName, retType, paramTypes );
}
// Walks up the parameterization chain until the raw (non-parameterized)
// generic type is reached, e.g. List<String> -> List.
public static <E extends IType> E getPureGenericType( E type )
{
while( type.isParameterizedType() )
{
//noinspection unchecked
type = (E)type.getGenericType();
}
return type;
}
// True when 'typeSource' is a "bean-like" reference type: it excludes the
// string/boolean/number/null primitives of the Gosu parser, Java primitives,
// arrays, and the structural function/constructor/meta types.
public static boolean isBeanType( IType typeSource )
{
return
typeSource != GosuParserTypes.STRING_TYPE() &&
typeSource != GosuParserTypes.BOOLEAN_TYPE() &&
// typeSource != GosuParserTypes.DATETIME_TYPE() &&
typeSource != GosuParserTypes.NULL_TYPE() &&
typeSource != GosuParserTypes.NUMBER_TYPE() &&
!typeSource.isPrimitive() &&
!typeSource.isArray() &&
!(typeSource instanceof IFunctionType) &&
!(typeSource instanceof IConstructorType) &&
!(typeSource instanceof IMetaType);
}
// True for numeric types: any primitive except boolean/void, plus boxed
// Number subclasses, IDimension implementations, and Character (which the
// type system treats as numeric here).
public static boolean isNumericType( IType intrType )
{
return intrType != null && ((intrType.isPrimitive() &&
intrType != JavaTypes.pBOOLEAN() &&
intrType != JavaTypes.pVOID()) ||
JavaTypes.NUMBER().isAssignableFrom(intrType) ||
JavaTypes.IDIMENSION().isAssignableFrom(intrType) ||
JavaTypes.CHARACTER().isAssignableFrom(intrType));
}
// True iff 'boxedType' is exactly the wrapper type for primitive type
// 'primitiveType' (e.g. pINT -> INTEGER). Non-primitive or null first
// arguments always yield false.
public static boolean isBoxedTypeFor( IType primitiveType, IType boxedType )
{
  if( primitiveType == null || !primitiveType.isPrimitive() )
  {
    return false;
  }
  // Identity comparisons are safe: JavaTypes exposes canonical instances.
  return (primitiveType == JavaTypes.pBOOLEAN() && boxedType == JavaTypes.BOOLEAN())
      || (primitiveType == JavaTypes.pBYTE()    && boxedType == JavaTypes.BYTE())
      || (primitiveType == JavaTypes.pCHAR()    && boxedType == JavaTypes.CHARACTER())
      || (primitiveType == JavaTypes.pDOUBLE()  && boxedType == JavaTypes.DOUBLE())
      || (primitiveType == JavaTypes.pFLOAT()   && boxedType == JavaTypes.FLOAT())
      || (primitiveType == JavaTypes.pINT()     && boxedType == JavaTypes.INTEGER())
      || (primitiveType == JavaTypes.pLONG()    && boxedType == JavaTypes.LONG())
      || (primitiveType == JavaTypes.pSHORT()   && boxedType == JavaTypes.SHORT());
}
// ---- Type-variable inference, error types, and type-loader management. ----
// All methods below are pure delegations to the ITypeSystem service obtained
// via CommonServices; no logic lives here.
public static TypeVarToTypeMap mapTypeByVarName( IType ownersType, IType declaringType, boolean bKeepTypeVars )
{
return CommonServices.getTypeSystem().mapTypeByVarName(ownersType, declaringType, bKeepTypeVars);
}
public static IType getActualType( IType type, TypeVarToTypeMap actualParamByVarName, boolean bKeepTypeVars )
{
return CommonServices.getTypeSystem().getActualType(type, actualParamByVarName, bKeepTypeVars);
}
public static void inferTypeVariableTypesFromGenParamTypeAndConcreteType( IType genParamType, IType argType, TypeVarToTypeMap map )
{
CommonServices.getTypeSystem().inferTypeVariableTypesFromGenParamTypeAndConcreteType( genParamType, argType, map );
}
// Error-type factories: used to represent unresolvable/errant types.
public static IErrorType getErrorType()
{
return CommonServices.getTypeSystem().getErrorType();
}
public static IErrorType getErrorType( String strErrantName )
{
return CommonServices.getTypeSystem().getErrorType( strErrantName );
}
public static IErrorType getErrorType( ParseResultsException pe )
{
return CommonServices.getTypeSystem().getErrorType(pe);
}
public static IDefaultTypeLoader getDefaultTypeLoader()
{
return CommonServices.getTypeSystem().getDefaultTypeLoader();
}
public static IType findParameterizedType( IType type, IType rhsType )
{
return CommonServices.getTypeSystem().findParameterizedType( type, rhsType );
}
// Listener is held weakly by the type system; caller must retain a strong reference.
public static void addTypeLoaderListenerAsWeakRef( ITypeLoaderListener listener )
{
CommonServices.getTypeSystem().addTypeLoaderListenerAsWeakRef( listener );
}
public static Set<String> getNamespacesFromTypeNames( Set<? extends CharSequence> allTypeNames, Set<String> namespaces )
{
return CommonServices.getTypeSystem().getNamespacesFromTypeNames( allTypeNames, namespaces );
}
public static void pushTypeLoader( IModule module, ITypeLoader loader )
{
CommonServices.getTypeSystem().pushTypeLoader(module, loader);
}
public static void removeTypeLoader( Class<? extends ITypeLoader> loader )
{
CommonServices.getTypeSystem().removeTypeLoader( loader );
}
// ---- Entity-access helpers and "currently compiling type" stack. ----
// Delegations to the entity-access and type-system services.
public static IType getKeyType()
{
return CommonServices.getEntityAccess().getKeyType();
}
public static boolean areBeansEqual( Object o1, Object o2 )
{
return CommonServices.getTypeSystem().areBeansEqual( o1, o2 );
}
// Include-all stack: push/pop pairs must be balanced by the caller.
public static void pushIncludeAll()
{
CommonServices.getTypeSystem().pushIncludeAll();
}
public static void popIncludeAll()
{
CommonServices.getTypeSystem().popIncludeAll();
}
public static boolean isIncludeAll()
{
return CommonServices.getTypeSystem().isIncludeAll();
}
public static ITypeUsesMap getDefaultTypeUsesMap()
{
return CommonServices.getEntityAccess().getDefaultTypeUses();
}
// Compiling-type stack: tracks the type currently being compiled (push/pop must balance).
public static IType getCurrentCompilingType() {
return CommonServices.getTypeSystem().getCurrentCompilingType();
}
public static IType getCompilingType( String strName )
{
return CommonServices.getTypeSystem().getCompilingType(strName);
}
public static void pushCompilingType(IType type) {
CommonServices.getTypeSystem().pushCompilingType(type);
}
public static void popCompilingType() {
CommonServices.getTypeSystem().popCompilingType();
}
/**
 * Returns the relative (unqualified) name of the given type, or the literal
 * string "<null>" when the type is null.
 */
public static String getUnqualifiedClassName( IType cls )
{
  if( cls == null )
  {
    return "<null>";
  }
  return cls.getRelativeName();
}
// ---- Symbol-table context stack and type-loader lookups. ----
public static void pushSymTableCtx( ISymbolTable ctx )
{
CommonServices.getTypeSystem().pushSymTableCtx(ctx);
}
public static void popSymTableCtx()
{
CommonServices.getTypeSystem().popSymTableCtx();
}
public static ISymbolTable getSymTableCtx()
{
return CommonServices.getTypeSystem().getSymTableCtx();
}
// Convenience overload: looks up the loader in the global module.
public static <T extends ITypeLoader> T getTypeLoader( Class<? extends T> loaderClass )
{
return CommonServices.getTypeSystem().getTypeLoader( loaderClass, TypeSystem.getGlobalModule() );
}
public static <T extends ITypeLoader> T getTypeLoader( Class<? extends T> loaderClass, IModule module )
{
return CommonServices.getTypeSystem().getTypeLoader(loaderClass, module);
}
public static String getNameOfParams( IType[] paramTypes, boolean bRelative, boolean bWithEnclosingType )
{
return CommonServices.getTypeSystem().getNameOfParams(paramTypes, bRelative, bWithEnclosingType);
}
public static ISymbolTable getCompiledGosuClassSymbolTable()
{
return CommonServices.getTypeSystem().getCompiledGosuClassSymbolTable();
}
public static List<ITypeLoader> getAllTypeLoaders()
{
return CommonServices.getTypeSystem().getAllTypeLoaders();
}
// Generic-name rendering: relative variant delegates to the 3-arg form below.
public static String getGenericRelativeName( IType type, boolean bRelativeBounds )
{
return getGenericName( type, true, bRelativeBounds );
}
public static String getGenericName( IType type )
{
return getGenericName(type, false, false);
}
/**
 * Renders the name of a generic type including its type-variable bounds, e.g.
 * {@code Foo<T extends Bar>}. Non-generic and already-parameterized types are
 * rendered as their plain (relative or qualified) name.
 *
 * @param type            the type to render
 * @param bRelative       use the relative name instead of the fully qualified name
 * @param bRelativeBounds render type-variable bounds with relative names
 */
public static String getGenericName( IType type, boolean bRelative, boolean bRelativeBounds )
{
  if( !type.isGenericType() || type.isParameterizedType() )
  {
    return bRelative ? type.getRelativeName() : type.getName();
  }
  StringBuilder sb = new StringBuilder( bRelative ? type.getRelativeName() : type.getName() );
  sb.append( '<' );
  IGenericTypeVariable[] typeVars = type.getGenericTypeVariables();
  for( int i = 0; i < typeVars.length; i++ )
  {
    // Comma-separate: emit the separator before every element except the first.
    if( i > 0 )
    {
      sb.append( ',' );
    }
    sb.append( typeVars[i].getNameWithBounds( bRelativeBounds ) );
  }
  return sb.append( '>' ).toString();
}
// ---- Feature-info lookups and primitive/box conversions. ----
public static IPropertyInfo getPropertyInfo( IType classBean, String strProperty, IFeatureFilter filter, IParserPart parserBase, IScriptabilityModifier scriptabilityConstraint) throws ParseException
{
return CommonServices.getGosuIndustrialPark().getPropertyInfo(classBean, strProperty, filter, parserBase, scriptabilityConstraint);
}
public static List<? extends IPropertyInfo> getProperties( ITypeInfo beanInfo, IType classSource )
{
return CommonServices.getGosuIndustrialPark().getProperties(beanInfo, classSource);
}
public static List<? extends IMethodInfo> getMethods( ITypeInfo beanInfo, IType ownersIntrinsicType )
{
return CommonServices.getGosuIndustrialPark().getMethods(beanInfo, ownersIntrinsicType);
}
@Deprecated // calls TypeSystem.get( javaClass )
public static IType getJavaType(Class javaClass) {
return CommonServices.getTypeSystem().getJavaType(javaClass);
}
public static String getNameWithQualifiedTypeVariables(IType type) {
return CommonServices.getTypeSystem().getNameWithQualifiedTypeVariables(type);
}
public static IType getDefaultParameterizedType(IType type) {
return CommonServices.getTypeSystem().getDefaultParameterizedType(type);
}
public static IType getDefaultParameterizedTypeWithTypeVars(IType type) {
return CommonServices.getTypeSystem().getDefaultParameterizedTypeWithTypeVars(type);
}
public static boolean canCast(IType lhsType, IType rhsType) {
return CommonServices.getTypeSystem().canCast(lhsType, rhsType);
}
public static void removeTypeLoaderListener(ITypeLoaderListener listener) {
CommonServices.getTypeSystem().removeTypeLoaderListener(listener);
}
// Primitive/box mapping delegations (e.g. "int" -> pINT, Integer -> pINT, pINT -> Integer).
public static IJavaType getPrimitiveType(String name) {
return CommonServices.getTypeSystem().getPrimitiveType(name);
}
public static IType getPrimitiveType(IType boxType) {
return CommonServices.getTypeSystem().getPrimitiveType(boxType);
}
public static IType getBoxType(IType primitiveType) {
return CommonServices.getTypeSystem().getBoxType(primitiveType);
}
/**
 * Returns a new array in which every primitive type in {@code typeParams} is
 * replaced by its boxed counterpart; non-primitive entries are copied as-is.
 * The input array is not modified.
 */
public static IType[] boxPrimitiveTypeParams( IType[] typeParams )
{
  IType[] boxed = new IType[typeParams.length];
  for( int i = 0; i < typeParams.length; i++ )
  {
    IType param = typeParams[i];
    boxed[i] = param.isPrimitive() ? TypeSystem.getBoxType( param ) : param;
  }
  return boxed;
}
// ---- Execution environment, type references, and type-literal parsing. ----
public static IExecutionEnvironment getExecutionEnvironment()
{
return CommonServices.getTypeSystem().getExecutionEnvironment();
}
public static IExecutionEnvironment getExecutionEnvironment( IProject project )
{
return CommonServices.getTypeSystem().getExecutionEnvironment(project);
}
public static IModule getCurrentModule()
{
return CommonServices.getTypeSystem().getCurrentModule();
}
/**
* IMPORTANT: The only time you should call this method is:
* 1) within a class implementing IType, or
* 2) wrapping a call to a Type constructor, typically within a type loader
* e.g., TypeSystem.getOrCreateTypeReference( new MyVeryOwnType() )
*
* Gets or creates a type ref for the specified type.
*
* @param type A raw or proxied type.
* @return If the type is already a reference, returns the type as-is, otherwise creates and returns a new type ref.
*/
public static ITypeRef getOrCreateTypeReference( IType type )
{
return CommonServices.getTypeSystem().getOrCreateTypeReference(type);
}
/**
* IMPORTANT: The only time you should call this method is:
* 1) wrapping a call to a Type constructor, typically within a type loader
* e.g., TypeSystem.getOrCreateTypeReference( new MyVeryOwnType() )
*
* Do NOT call this when creating the type. Instead call getOrCreateTypeReference
* Gets or creates a type ref for the specified type.
*
* This method will NOT update the type reference in the proxy.
*
* @param type A raw or proxied type.
* @return returns the already created type reference or throws if the ref does not exist
*/
public static ITypeRef getTypeReference( IType type )
{
return CommonServices.getTypeSystem().getTypeReference(type);
}
public static IType getTypeFromObject( Object obj )
{
return CommonServices.getTypeSystem().getTypeFromObject(obj);
}
/**
* Parses a type name such as Iterable<Claim>.
* @param typeName the name to parse
* @return the type
* @throws RuntimeException if the name does not resolve to a known type or fails to parse
*/
public static IType parseTypeLiteral(String typeName) {
try {
IType type = GosuParserFactory.createParser(typeName).parseTypeLiteral(null).getType().getType();
if (type instanceof IErrorType) {
// The parser resolved to an error type rather than throwing; normalize to an exception.
throw new RuntimeException("Type not found: " + typeName);
}
return type;
} catch (ParseResultsException e) {
throw new RuntimeException("Type not found: " + typeName, e);
}
}
public static boolean isExpandable( IType type )
{
return CommonServices.getTypeSystem().isExpandable(type);
}
public static IType boundTypes(IType targetType, List<IType> typesToBound) {
return CommonServices.getTypeSystem().boundTypes(targetType, typesToBound);
}
// Convenience overload: resolves class info in the current module.
public static IJavaClassInfo getJavaClassInfo(Class jClass) {
return getJavaClassInfo(jClass, TypeSystem.getCurrentModule());
}
/**
 * Resolves the {@link IJavaClassInfo} for the given class within the given module.
 * Handles four cases: type-system proxy classes (unwrapped to their backing
 * class info), arrays (resolved via their component type), java.lang.reflect
 * proxies (resolved directly through the default type loader, since they are
 * not discoverable by name), and ordinary classes (resolved by qualified name).
 *
 * @param jClass the class to resolve; may be null
 * @param module the module to resolve within
 * @return the resolved class info, or null if {@code jClass} is null or cannot be resolved
 */
public static IJavaClassInfo getJavaClassInfo(Class jClass, IModule module) {
  if (jClass == null) {
    return null;
  }
  // The type system addresses nested classes with '.' rather than '$'.
  String fqn = jClass.getName().replace('$', '.');
  if (IType.class.isAssignableFrom(jClass) && fqn.endsWith(ITypeRefFactory.SYSTEM_PROXY_SUFFIX)) {
    IJavaType type = (IJavaType) get(jClass);
    return type.getBackingClassInfo();
  } else if (jClass.isArray()) {
    Class componentType = jClass.getComponentType();
    IJavaClassInfo javaClassInfo = getJavaClassInfo(componentType, module);
    // Fix: previously an unresolvable component type caused an NPE here;
    // propagate null instead, consistent with the other unresolved cases.
    return javaClassInfo == null ? null : javaClassInfo.getArrayType();
  } else if(Proxy.class.isAssignableFrom(jClass)) {
    IDefaultTypeLoader defaultTypeLoader = module.getModuleTypeLoader().getDefaultTypeLoader();
    return defaultTypeLoader.getJavaClassInfoForClassDirectly(jClass, module);
  } else {
    return getJavaClassInfo(fqn, module);
  }
}
// ---- Reflection and bytecode-backed type helpers. ----
public static Method[] getDeclaredMethods( Class cls )
{
return CommonServices.getGosuIndustrialPark().getDeclaredMethods( cls );
}
// A type is "bytecode-backed" when it is a Java or Gosu class (or array thereof).
public static boolean isBytecodeType( IType type )
{
return type instanceof IJavaType ||
type instanceof IGosuClass ||
type instanceof IGosuArrayClass ||
type instanceof IJavaArrayType;
}
// Unwraps an IJavaType to its underlying Java-backed type; other types pass through.
public static IType getTypeFromJavaBackedType(IType type) {
if( type instanceof IJavaType) {
return ((IJavaType)type).getTypeFromJavaBackedType();
} else {
return type;
}
}
/**
 * Re-resolves a Java-backed type through the type system by name, preserving
 * its array dimensions and parameterization. The component class is looked up
 * by its qualified name (with '$' normalized to '.') in the type's own module,
 * then the array dimensions and type parameters are re-applied.
 */
public static IType getTypeFromJavaBasedType(IJavaBackedType javaType) {
  // Peel off array dimensions down to the component class info.
  IJavaClassInfo componentInfo = javaType.getBackingClassInfo();
  int dims = 0;
  while( componentInfo.isArray() )
  {
    dims++;
    componentInfo = componentInfo.getComponentType();
  }
  // Re-resolve by name in the owning module.
  IType resolved = TypeSystem.getByFullName( componentInfo.getName().replace('$', '.'), javaType.getTypeLoader().getModule() );
  // Rebuild the array dimensions that were peeled off above.
  for( int i = 0; i < dims; i++ )
  {
    resolved = resolved.getArrayType();
  }
  if (javaType.isParameterizedType()) {
    resolved = resolved.getParameterizedType(javaType.getTypeParameters());
  }
  return resolved;
}
/**
 * Resolves class info by fully qualified name, searching the given module and
 * its dependency traversal order. A null module falls back to the global module.
 * Each candidate module is pushed onto the module stack for the duration of its
 * lookup and popped in a finally block to keep the stack balanced.
 *
 * @return the first class info found, or null if no module resolves the name
 */
public static IJavaClassInfo getJavaClassInfo(String fullyQualifiedName, IModule module) {
if( module == null ) {
module = TypeSystem.getGlobalModule();
}
for (IModule m : module.getModuleTraversalList()) {
TypeSystem.pushModule(m);
try {
IDefaultTypeLoader defaultTypeLoader = m.getModuleTypeLoader().getDefaultTypeLoader();
if (defaultTypeLoader != null) {
IJavaClassInfo javaClassInfo = defaultTypeLoader.getJavaClassInfo(fullyQualifiedName);
if (javaClassInfo != null) {
return javaClassInfo;
}
}
} finally {
TypeSystem.popModule(m);
}
}
return null;
}
/**
 * Determines the module that owns the given type, or null when it cannot be
 * determined. If the type has no type loader, the lookup recurses through the
 * enclosing-type chain instead.
 */
public static IModule getModuleFromType(IType type) {
IModule result = null;
if(type != null) {
ITypeLoader loader = type.getTypeLoader();
if(loader == null) {
IType candidate = type.getEnclosingType();
// Guard against infinite recursion when a type encloses itself.
if(candidate != type) {
result = getModuleFromType(candidate);
}
// FIXME circular loop where type == candiate implies null result.
} else {
result = loader.getModule();
}
}
return result;
}
// ---- Module stack, class loader, and type-system lifecycle. ----
public static void pushModule(IModule gosuModule) {
CommonServices.getTypeSystem().pushModule(gosuModule);
}
public static void popModule(IModule gosuModule) {
CommonServices.getTypeSystem().popModule(gosuModule);
}
public static IGosuClassLoader getGosuClassLoader() {
return CommonServices.getTypeSystem().getGosuClassLoader();
}
public static void dumpGosuClassLoader() {
CommonServices.getTypeSystem().dumpGosuClassLoader();
}
public static IModule getGlobalModule() {
return getExecutionEnvironment().getGlobalModule();
}
public static boolean isSingleModuleMode() {
return CommonServices.getTypeSystem().isSingleModuleMode();
}
public static IMetaType getDefaultType() {
return CommonServices.getTypeSystem().getDefaultType();
}
// Shuts down the type system for the given execution environment.
// NOTE(review): the global module pushed here is never popped (the pop is
// commented out) -- presumably deliberate since the type system is going away;
// confirm before "fixing".
public static void shutdown(IExecutionEnvironment execEnv) {
try {
TypeSystem.pushModule( execEnv.getGlobalModule() );
CommonServices.getTypeSystem().shutdown();
} finally {
// TypeSystem.popModule( execEnv.getGlobalModule());
}
}
public static void addShutdownListener(TypeSystemShutdownListener listener) {
CommonServices.getTypeSystem().addShutdownListener(listener);
}
public static TypeSystemState getState() {
return getExecutionEnvironment().getState();
}
public static String[] getTypesForFile(IModule module, IFile file) {
return CommonServices.getTypeSystem().getTypesForFile(module, file);
}
// ---- Refresh notifications and resource change events. ----
public static void refresh( boolean bRefreshCaches )
{
CommonServices.getTypeSystem().refresh(bRefreshCaches);
}
public static void refresh(IModule module)
{
CommonServices.getTypeSystem().refresh(module);
}
/**
* Refresh just the specified type i.e., a gosu editor calls this on changes
*/
public static void refresh(ITypeRef typeRef)
{
CommonServices.getTypeSystem().refresh(typeRef);
}
/**
* DO NOT USE OR DELETE. Called form the debugging process (IDE).
* @param filePaths absolute paths of files to mark as refreshed; unresolvable paths are skipped
*/
public static void refreshedFiles(String[] filePaths) {
for (String filePath : filePaths) {
IFile file = CommonServices.getFileSystem().getIFile(new File(filePath));
if (file != null) {
TypeSystem.refreshed(file);
}
}
}
// Resource change notifications: modification / deletion / creation, with
// optional type-name hints for deletion and creation.
public static void refreshed(IResource resource) {
CommonServices.getTypeSystem().refreshed(resource, null, RefreshKind.MODIFICATION);
}
public static void deleted(IResource resource) {
CommonServices.getTypeSystem().refreshed(resource, null, RefreshKind.DELETION);
}
public static void deleted(IResource resource, String typeName) {
CommonServices.getTypeSystem().refreshed(resource, typeName, RefreshKind.DELETION);
}
public static void created(IResource resource) {
CommonServices.getTypeSystem().refreshed(resource, null, RefreshKind.CREATION);
}
public static void created(IResource resource, String typeName) {
CommonServices.getTypeSystem().refreshed(resource, typeName, RefreshKind.CREATION);
}
// True only for proxied (ITypeRef) types whose underlying type has been deleted.
public static boolean isDeleted(IType type) {
if (type instanceof IErrorType) {
// must return false, an error type does not mean deleted
return false;
}
// a type that's not proxied is never deleted
return type instanceof ITypeRef && ((ITypeRef)type).isDeleted();
}
public static IType replaceTypeVariableTypeParametersWithBoundingTypes( IType type, IType enclosingType ) {
return CommonServices.getTypeSystem().replaceTypeVariableTypeParametersWithBoundingTypes( type, enclosingType );
}
public static IModule getJreModule() {
return getExecutionEnvironment().getJreModule();
}
}
| apache-2.0 |
rooney0928/qnd2 | app/src/main/java/com/app/qunadai/content/ui/me/AboutActivity.java | 953 | package com.app.qunadai.content.ui.me;
import android.view.View;
import com.app.qunadai.R;
import com.app.qunadai.content.base.BaseActivity;
/**
* Created by wayne on 2017/5/15.
*/
/**
 * "About us" screen. All content comes from the static layout
 * R.layout.activity_about; the lifecycle/data callbacks required by
 * BaseActivity are intentionally no-ops.
 */
public class AboutActivity extends BaseActivity {
@Override
protected void updateTopViewHideAndShow() {
// setTitleBarStatus(TITLE_ON_BACK_ON);
// Title text is Chinese for "About us".
setTitleText("关于我们");
}
@Override
protected View createCenterView() {
// Inflate the static about-page layout as the activity's main content.
return View.inflate(this, R.layout.activity_about,null);
}
@Override
protected View createBottomView() {
// No bottom bar on this screen.
return null;
}
@Override
protected void initView() {
}
@Override
public void initViewData() {
}
@Override
public void updateView(Object serverData) {
}
@Override
public void updateError(String error) {
}
@Override
public void requestStart() {
}
@Override
public void requestEnd() {
}
}
| apache-2.0 |
abono/Kaya-CMS | KayaCMS-Admin/src/main/java/com/aranya/kayacms/controller/admin/login/LogInController.java | 2797 | package com.aranya.kayacms.controller.admin.login;
import com.aranya.kayacms.beans.adminuser.AdminUser;
import com.aranya.kayacms.beans.website.WebSite;
import com.aranya.kayacms.beans.website.WebSiteId;
import com.aranya.kayacms.controller.BaseController;
import com.aranya.kayacms.controller.admin.util.AdminSessionUtil;
import com.aranya.kayacms.exception.KayaAccessDeniedException;
import com.aranya.kayacms.exception.KayaServiceException;
import com.aranya.kayacms.service.AdminUserService;
import com.aranya.kayacms.util.RequestUtil;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
// Admin authentication endpoints: session check, log in, and log out.
// All routes are rooted at /api/admin.
@Controller
@RequestMapping("/api/admin")
public class LogInController extends BaseController {
@Autowired private AdminUserService adminUserService;
// GET /api/admin/check -- returns the currently logged-in admin (from session), if any.
@RequestMapping(value = "/check", method = RequestMethod.GET)
public @ResponseBody LogInResponse isLoggedIn(HttpServletRequest request)
throws KayaServiceException {
LogInResponse logIn = AdminSessionUtil.getAdminUser(request);
return logIn;
}
// POST /api/admin/logIn -- authenticates against the web site scoped to this request,
// stores the resulting LogInResponse in the session, and returns it.
// Throws KayaAccessDeniedException on bad credentials.
@RequestMapping(value = "/logIn", method = RequestMethod.POST)
public @ResponseBody LogInResponse logIn(
HttpServletRequest request, @RequestBody LogInRequest logInRequest)
throws KayaServiceException, KayaAccessDeniedException {
WebSite webSite = RequestUtil.getWebSite(request);
WebSiteId webSiteId = webSite.getWebSiteId();
String userName = logInRequest.getUserName();
String password = logInRequest.getPassword();
AdminUser adminUser = adminUserService.getAdminUser(webSiteId, userName, password);
if (adminUser == null) {
throw new KayaAccessDeniedException("Invalid user name or password");
} else {
try {
LogInResponse logIn =
LogInResponse.builder()
.adminUserId(adminUser.getAdminUserId().getId())
.firstName(adminUser.getFirstName())
.lastName(adminUser.getLastName())
.email(adminUser.getEmail())
.userName(adminUser.getUserName())
.build();
AdminSessionUtil.setAdminUser(request, logIn);
return logIn;
} catch (Exception e) {
// Wrap any failure building/storing the session payload as a service error.
throw new KayaServiceException(e);
}
}
}
// GET /api/admin/logOut -- clears the admin from the session; always returns true.
@RequestMapping(value = "/logOut", method = RequestMethod.GET)
public @ResponseBody boolean logOut(HttpServletRequest request) throws KayaServiceException {
AdminSessionUtil.setAdminUser(request, null);
return true;
}
}
| apache-2.0 |
zhongl/stompize | core/src/test/java/com/github/zhongl/stompize/ForeachFrameOfTest.java | 4460 | package com.github.zhongl.stompize;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
/** @author <a href="mailto:zhong.lunfu@gmail.com">zhongl<a> */
/**
 * Tests for ForeachFrameOf: verifies which callbacks fire (and with what
 * arguments/indices) for command methods with various parameter shapes.
 * Each anonymous subclass overrides the callbacks with inline assertions.
 */
public class ForeachFrameOfTest {
// A command with a required header, a content param, and varargs optionals
// should report each at its parameter index (1, 2, 3 respectively).
@Test
public void shouldVisitAbstraceMethodOfCommandWithContent() throws Exception {
new ForeachFrameOf(CommandWithContent.class) {
@Override
protected boolean exclude(Method method) {
// Only visit abstract methods.
return !Modifier.isAbstract(method.getModifiers());
}
@Override
protected void command(String name, Method method) {
assertThat(name, is("command"));
}
@Override
protected void required(String headerName, int index) {
assertThat(headerName, is("simple"));
assertThat(index, is(1));
}
@Override
protected void content(int index) {
assertThat(index, is(2));
}
@Override
protected void optionals(int index) {
assertThat(index, is(3));
}
}.apply();
}
// Camel-case parameter names should be reported as dashed lower-case header names.
@Test
public void shouldSupportCamelNameToDashedLowCase() throws Exception {
new ForeachFrameOf(CommandWithCamelName.class) {
@Override
protected boolean exclude(Method method) {
return !Modifier.isAbstract(method.getModifiers());
}
@Override
protected void command(String name, Method method) { }
@Override
protected void required(String headerName, int index) {
assertThat(headerName, is("camel-name"));
assertThat(index, is(1));
}
@Override
protected void content(int index) { }
@Override
protected void optionals(int index) { }
}.apply();
}
// A command without a content parameter should report content index -1.
@Test
public void shouldVisitAbstraceMethodOfCommandWithOutContent() throws Exception {
new ForeachFrameOf(CommandWithOutContent.class) {
@Override
protected boolean exclude(Method method) {
return !Modifier.isAbstract(method.getModifiers());
}
@Override
protected void command(String name, Method method) { }
@Override
protected void required(String headerName, int index) { }
@Override
protected void content(int index) {
assertThat(index, is(-1));
}
@Override
protected void optionals(int index) { }
}.apply();
}
// A parameter that is neither a Header nor a Content should be rejected.
@Test(expected = IllegalArgumentException.class)
public void shouldComplainUnexpectArgumentType() throws Exception {
new ForeachFrameOf(CommandWithNetherHeaderNorContent.class) {
@Override
protected boolean exclude(Method method) {
return !Modifier.isAbstract(method.getModifiers());
}
@Override
protected void command(String name, Method method) { }
@Override
protected void required(String headerName, int index) { }
@Override
protected void content(int index) { }
@Override
protected void optionals(int index) { }
}.apply();
}
// ---- Fixture command shapes and header/content stand-ins. ----
static abstract class CommandWithContent {
public abstract void command(Simple simple, Plain content, Header... optionals);
}
static abstract class CommandWithOutContent {
public abstract void command(Simple simple, Header... optionals);
}
static abstract class CommandWithCamelName {
public abstract void command(CamelName camelName);
}
static abstract class CommandWithNetherHeaderNorContent {
public abstract void command(NetherHeaderNorContent netherHeaderNorContent);
}
static class Simple extends Header {
protected Simple(String value) { super(value); }
}
static class CamelName extends Header {
protected CamelName(String value) { super(value); }
}
static class Plain extends Content<String> {
protected Plain(String value) { super(value, ""); }
@Override
protected String value() { return value; }
}
static class NetherHeaderNorContent {}
}
| apache-2.0 |
jexp/idea2 | plugins/groovy/src/org/jetbrains/plugins/groovy/config/ui/CreateLibraryDialog.java | 1509 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.groovy.config.ui;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import javax.swing.*;
import java.awt.event.KeyEvent;
/**
 * Modal dialog asking whether a library should be created project-local or
 * global. The radio-button labels are supplied by the caller; the form itself
 * is bound from the associated .form file (contentPane and the radio buttons).
 */
public class CreateLibraryDialog extends DialogWrapper {
private JPanel contentPane;
private JRadioButton myInProject;
private JRadioButton myGlobal;
public CreateLibraryDialog(Project project, final String title, final String inProjectText, final String isGlobalText) {
super(project, true);
setModal(true);
setTitle(title);
// Default to the project-local choice.
myInProject.setSelected(true);
// Mnemonics: Alt+P for project, Alt+G for global.
myInProject.setMnemonic(KeyEvent.VK_P);
myGlobal.setMnemonic(KeyEvent.VK_G);
myInProject.setText(inProjectText);
myGlobal.setText(isGlobalText);
init();
}
protected JComponent createCenterPanel() {
return contentPane;
}
// True when the user chose the project-local option.
public boolean isInProject() {
return myInProject.isSelected();
}
}
| apache-2.0 |
KurtTaylan/ocpJavaSE8preparation | src/chapter2DesignPatternsandPrinciples/chapter2_4UnderstandingDesignPrinciples/chapter2_4_4ObjectComposition/Gas.java | 369 | /**
*
*/
package chapter2DesignPatternsandPrinciples.chapter2_4UnderstandingDesignPrinciples.chapter2_4_4ObjectComposition;
/**
* @author tkurt
* Date: Apr 7, 2016 5:26:22 PM
*
*/
/**
 * Gas pedal used to demonstrate object composition: a Car delegates speed
 * changes to an owned Gas instance rather than inheriting the behavior.
 */
public class Gas {

    /** Announces acceleration on standard output. */
    public void getSpeedUp(){
        String message = "Speed is getting up";
        System.out.println(message);
    }

    /** Announces deceleration on standard output. */
    public void getSpeedDown(){
        String message = "Speed is getting down";
        System.out.println(message);
    }
}
| apache-2.0 |
amidrunk/synapse-lang | compiler/src/test/java/synapse/compiler/parser/impl/JavaCCParserAdapterTest.java | 3357 | package synapse.compiler.parser.impl;
import org.junit.Before;
import org.junit.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import synapse.compiler.parser.ClassDelegate;
import synapse.compiler.parser.ParserDelegate;
import synapse.compiler.parser.PatternBuilder;
import synapse.compiler.parser.ReceptorDelegate;
import java.io.ByteArrayInputStream;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Tests for JavaCCParserAdapter: feeds small source snippets through the
 * parser and verifies the exact ordered sequence of delegate callbacks using
 * Mockito mocks wired together in setup().
 */
public class JavaCCParserAdapterTest {
private final ParserDelegate parserDelegate = mock(ParserDelegate.class);
private final ClassDelegate classDelegate = mock(ClassDelegate.class);
private final ReceptorDelegate receptorDelegate = mock(ReceptorDelegate.class);
private final PatternBuilder patternDelegate = mock(PatternBuilder.class);
@Before
public void setup() {
// Chain the mocks so each begin* call hands back the next-level delegate.
when(parserDelegate.beginClass(anyString())).thenReturn(classDelegate);
when(classDelegate.beginReceptor(anyString())).thenReturn(receptorDelegate);
when(receptorDelegate.beginMessageFieldPattern()).thenReturn(patternDelegate);
}
// An empty class produces just beginClass followed by complete.
@Test
public void emptyClassWithCanBeParsed() {
parse("class MyClass() {}");
final InOrder inOrder = Mockito.inOrder(parserDelegate, classDelegate);
inOrder.verify(parserDelegate).beginClass(eq("MyClass"));
inOrder.verify(classDelegate).complete();
}
// A field declared in the class header is reported via addField(name, type).
@Test
public void classWithSingleField() {
parse("class MyClass(myField:String) {}");
final InOrder inOrder = Mockito.inOrder(parserDelegate, classDelegate);
inOrder.verify(parserDelegate).beginClass(eq("MyClass"));
inOrder.verify(classDelegate).addField(eq("myField"), eq("String"));
inOrder.verify(classDelegate).complete();
}
// A receptor with no message data produces beginReceptor/complete with no pattern callbacks.
@Test
public void classWithSingleDataLessReceptor() {
parse("class MyClass() { test { => } }");
final InOrder inOrder = Mockito.inOrder(parserDelegate, classDelegate, receptorDelegate);
inOrder.verify(parserDelegate).beginClass(eq("MyClass"));
inOrder.verify(classDelegate).beginReceptor(eq("test"));
inOrder.verify(receptorDelegate).complete();
inOrder.verify(classDelegate).complete();
}
// A typed message-field binding is reported through the pattern builder.
@Test
public void classWithSingleReceptorWithTypedMessageFieldBoundToVariable() {
parse("class MyClass() { test { foo:int =>} }");
final InOrder inOrder = Mockito.inOrder(parserDelegate, classDelegate, receptorDelegate, patternDelegate);
inOrder.verify(parserDelegate).beginClass(eq("MyClass"));
inOrder.verify(classDelegate).beginReceptor(eq("test"));
inOrder.verify(receptorDelegate).beginMessageFieldPattern();
inOrder.verify(patternDelegate).typedVariable(eq("foo"), eq("int"));
inOrder.verify(receptorDelegate).complete();
inOrder.verify(classDelegate).complete();
}
// Helper: parse UTF-8 source from a string, rethrowing checked failures unchecked.
@SuppressWarnings("unchecked")
protected void parse(String code) {
try {
new JavaCCParserAdapter().parse(parserDelegate, new ByteArrayInputStream(code.getBytes()), "UTF-8");
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
gerrit-review/gerrit | java/com/google/gerrit/server/project/ProjectData.java | 1176 | // Copyright (C) 2017 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.project;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.reviewdb.client.Project;
/**
 * Immutable pairing of a project with its ancestor project names
 * (inheritance chain). The ancestor list is defensively copied into an
 * ImmutableList at construction time.
 */
public class ProjectData {
private final Project project;
private final ImmutableList<Project.NameKey> ancestors;
public ProjectData(Project project, Iterable<Project.NameKey> ancestors) {
this.project = project;
this.ancestors = ImmutableList.copyOf(ancestors);
}
public Project getProject() {
return project;
}
public ImmutableList<Project.NameKey> getAncestors() {
return ancestors;
}
}
| apache-2.0 |
Gadreel/dcraft | dcraft.core/src/main/java/dcraft/web/core/RpcServiceResult.java | 2640 | package dcraft.web.core;
import dcraft.bus.Message;
import dcraft.bus.ServiceResult;
import dcraft.log.Logger;
import dcraft.session.Session;
import io.netty.handler.codec.http.cookie.Cookie;
import io.netty.handler.codec.http.cookie.DefaultCookie;
/**
 * ServiceResult that relays an RPC reply back over the originating web
 * context. On callback it assembles a log/reply message, attaches session
 * info (flagging a session change and refreshing the auth-token cookie when
 * needed), and sends it to the client. The context is cleared on first use so
 * the callback is effectively one-shot.
 */
public class RpcServiceResult extends ServiceResult {
protected WebContext context = null;
// Session id captured when the request arrived; used to detect a session change.
protected String sessid = null;
public RpcServiceResult(WebContext context, String sessid) {
this.context = context;
this.sessid = sessid;
}
@Override
public void callback() {
// Take ownership of the context and null the field so a second callback is a no-op.
WebContext ctx = this.context;
this.context = null;
if (ctx == null)
return;
try {
// if we did not get an official reply to the request then
// it may have been a timeout. regardless, collect messages
// and prepare to return any payload
Message rmsg = this.toLogMessage();
// add the body (payload) if any
Message reply = this.getResult();
if (reply != null) {
// Copy routing fields only when present on the reply.
if (reply.hasField("Service"))
rmsg.setField("Service", reply.getField("Service"));
if (reply.hasField("Feature"))
rmsg.setField("Feature", reply.getField("Feature"));
if (reply.hasField("Op"))
rmsg.setField("Op", reply.getField("Op"));
if (reply.hasField("Tag"))
rmsg.setField("Tag", reply.getField("Tag"));
if (reply.hasField("FromHub"))
rmsg.setField("FromHub", reply.getField("FromHub"));
//if (reply.hasField("Body")) - always even if null
rmsg.setField("Body", reply.getField("Body"));
}
Session sess = ctx.getSession();
// session may be null on Session - Control - Stop
if (sess != null) {
String currsessid = sess.getId();
rmsg.setField("Session", currsessid);
// Tell the client its session changed mid-request so it can resync.
if ((this.sessid != null) && !currsessid.equals(this.sessid))
rmsg.setField("SessionChanged", true);
// web server does not send SessionSecret or AuthToken in response
//System.out.println("outgoing rpc: " + rmsg);
// Refresh the auth-token cookie if the session rotated its token.
String authupdate = sess.checkTokenUpdate();
if (authupdate != null) {
Cookie sk = new DefaultCookie("dcAuthToken", authupdate);
sk.setPath("/");
sk.setHttpOnly(true);
ctx.getResponse().setCookie(sk);
}
}
// TODO switch so we can trace by tag - these values aren't helpful
if (Logger.isDebug())
Logger.debug("RPC Reply Message: " + rmsg.getFieldAsString("Service") + " - " + rmsg.getFieldAsString("Feature")
+ " - " + rmsg.getFieldAsString("Op"));
ctx.send(rmsg);
}
catch (Exception x) {
Logger.info("Error replying to RPC request: " + x);
ctx.sendInternalError();
}
}
}
mrFlick72/socialDocumentLibrary | search-book-service/src/main/java/it/valeriovaudi/documentlibrary/service/SearchBookService.java | 8634 | package it.valeriovaudi.documentlibrary.service;
import it.valeriovaudi.documentlibrary.model.SearchIndex;
import it.valeriovaudi.documentlibrary.repository.SearchIndexRepository;
import it.valeriovaudi.documentlibrary.repository.SearchIndexRepositoryMongoImpl;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.mvc.method.annotation.MvcUriComponentsBuilder;
import javax.annotation.PostConstruct;
import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import java.io.StringReader;
import java.net.URI;
import java.util.*;
/**
* Created by Valerio on 09/06/2015.
*/
@RestController
@RequestMapping("/searchBookIndex")
public class SearchBookService {
@Autowired
private SearchIndexRepository searchIndexRepository;
private Map<String,Class> mapping;
public void setMapping(Map<String, Class> mapping) {
this.mapping = mapping;
}
public void setSearchIndexRepository(SearchIndexRepository searchIndexRepository) {
this.searchIndexRepository = searchIndexRepository;
}
@PostConstruct
private void init(){
this.mapping = new HashMap<>();
mapping.put("bookName",String.class);
mapping.put("searchTags",List.class);
}
/* insert methods */
// SearchIndex saveIndex(I bookId,String bookName,boolean published, String... searchTags);
@RequestMapping(method = RequestMethod.POST)
public ResponseEntity<Void> saveIndex(@RequestBody SearchIndex searchIndex){
return putMethodsResponseEntityFactory(HttpStatus.CREATED,searchIndexRepository.saveIndex(searchIndex));
}
/* update methods */
// SearchIndex publishSearchBookIndex(I bookId, boolean published);
@RequestMapping(value = "/{bookId}/publishBook",method = RequestMethod.PUT)
public ResponseEntity<Void> publishSearchBookIndex(@PathVariable(value = "bookId") String bookId){
SearchIndex searchIndex = searchIndexRepository.publishSearchBookIndex(bookId, true);
return putMethodsResponseEntityFactory(HttpStatus.NO_CONTENT,searchIndex);
}
@RequestMapping(value = "/{bookId}/unPublishBook",method = RequestMethod.PUT)
public ResponseEntity<Void> unPublishSearchBookIndex(@PathVariable(value = "bookId") String bookId){
return putMethodsResponseEntityFactory(HttpStatus.NO_CONTENT,searchIndexRepository.publishSearchBookIndex(bookId, false));
}
// SearchIndex addSearchTags(I bookId, String...searchTags);
@RequestMapping(value = "/{bookId}/addSearchTags",method = RequestMethod.PUT)
public ResponseEntity<Void> addSearchTags(@PathVariable(value = "bookId") String bookId,
@RequestBody String[] searchTags){
return putMethodsResponseEntityFactory(HttpStatus.NO_CONTENT,searchIndexRepository.addSearchTags(bookId, searchTags));
}
// SearchIndex removeSearchTags(I bookId, String...searchTags);
@RequestMapping(value = "/{bookId}/removeSearchTags",method = RequestMethod.PUT)
public ResponseEntity<Void> removeSearchTags(@PathVariable(value = "bookId") String bookId,
@RequestBody String[] searchTags){
return putMethodsResponseEntityFactory(HttpStatus.NO_CONTENT,searchIndexRepository.removeSearchTags(bookId, searchTags));
}
@RequestMapping(value = "/{bookId}",method = RequestMethod.PUT)
public ResponseEntity<Void> updateSearchIndex(@PathVariable(value = "bookId") String bookId,
@RequestBody String body){
SearchIndex searchIndexById = searchIndexRepository.findSearchIndexById(bookId);
JsonObject readBody = Json.createReader(new StringReader(body)).readObject();
if(readBody.containsKey("published")){
searchIndexById.setPublished(readBody.getBoolean("published"));
}
if(readBody.containsKey("searchTags")){
JsonArray searchTags = readBody.getJsonArray("searchTags");
List<String> searchTagsValues = new ArrayList<>();
for(int i = 0 ; i < searchTags.size(); i++){
searchTagsValues.add(searchTags.getString(i));
}
searchIndexById.setSearchTags(searchTagsValues);
}
return putMethodsResponseEntityFactory(HttpStatus.NO_CONTENT,searchIndexRepository.saveIndex(searchIndexById));
}
/* read methods*/
// SearchIndex findSearckIndexById(I bookId);
@RequestMapping(value = "/{bookId}", method = RequestMethod.GET)
public ResponseEntity findSearchIndexById(@PathVariable(value = "bookId") String bookId){
SearchIndex searckIndexById = searchIndexRepository.findSearchIndexById(bookId);
return searckIndexById != null ? ResponseEntity.ok(searchIndexRepository.findSearchIndexById(bookId)) : ResponseEntity.notFound().build();
}
// List<SearchIndex> findSearchIndexByMetadata(String bookName,String... searchTags);
// List<SearchIndex> findAllSearchIndex(int page,int pageSize);
@RequestMapping(method = RequestMethod.GET)
public ResponseEntity<List<SearchIndex>> findSearchIndex(@RequestParam(value = "q",required = false,defaultValue = "") String query,
@RequestParam(value = "page", defaultValue = SearchIndexRepositoryMongoImpl.EMPTY_PAGE_STRING_PARAMITER,required = false) int page,
@RequestParam(value = "pageSize", defaultValue = SearchIndexRepositoryMongoImpl.EMPTY_PAGE_STRING_PARAMITER,required = false) int pageSize){
List<SearchIndex> result;
Map<String, Object> paramiters = searchIndexQueryParser(query,mapping);
if(paramiters.size() > 0 ){
String bookName = (String) paramiters.get("bookName");
List<String> querySearchTags = (List<String>) paramiters.get("searchTags");
List<String> searchTags = querySearchTags !=null ? querySearchTags : new ArrayList<>();
String[] searchMetadataArray = searchTags.toArray(new String[searchTags.size()]);
result = searchIndexRepository.findSearchIndexByMetadata(bookName, searchMetadataArray);
} else {
result = searchIndexRepository.findAllSearchIndex(page,pageSize);
}
return ResponseEntity.ok(result);
}
@RequestMapping(value = "/{bookId}", method = RequestMethod.DELETE)
public ResponseEntity<Void> deleteSearchIndex(@PathVariable("bookId") String bookId){
return searchIndexRepository.deleteSearchIndex(bookId) ?
ResponseEntity.noContent().build() :
ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
private ResponseEntity<Void> putMethodsResponseEntityFactory(HttpStatus httpStatus,SearchIndex searchIndex){
if(searchIndex != null){
URI location = MvcUriComponentsBuilder.fromMethodName(SearchBookService.class, "findSearchIndexById", searchIndex.getBookId()).build().toUri();
return ResponseEntity.status(httpStatus).location(location).build();
} else {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
private Map<String,Object> searchIndexQueryParser(String query,Map<String,Class> mapping){
Map<String,Object> map = new HashMap<>();
String[] split = query.split(";");
Arrays.asList(split).stream()
.forEach(strings -> {
String[] keyValueItem = strings.trim().split("=");
if(keyValueItem.length == 2){
map.put(keyValueItem[0].trim(),
valueAdapter(keyValueItem[1].trim(),
keyValueItem[0].trim(),
mapping));
}
});
return map;
}
private Object valueAdapter(String value,String keyValue,Map<String,Class> mapping){
Class aClass = mapping.get(keyValue);
Object result=null;
if(aClass != null){
if(aClass == String.class){
result = value;
} else if(aClass == List.class) {
result = Arrays.asList(value.split(","));
}else if (aClass == Integer.class){
result = new Integer(value);
}
}
return result;
}
}
| apache-2.0 |
felipeg48/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/security/oauth2/resource/servlet/OAuth2ResourceServerWebSecurityConfiguration.java | 1740 | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.security.oauth2.resource.servlet;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.oauth2.jwt.JwtDecoder;
/**
 * {@link WebSecurityConfigurerAdapter} for OAuth2 resource server support.
 * Applied only when the application has not defined its own adapter.
 *
 * @author Madhura Bhave
 */
@Configuration
@ConditionalOnMissingBean(WebSecurityConfigurerAdapter.class)
class OAuth2ResourceServerWebSecurityConfiguration {

	/**
	 * Active only when a {@link JwtDecoder} bean is present: requires
	 * authentication on every request and enables JWT resource-server support.
	 */
	@Configuration
	@ConditionalOnBean(JwtDecoder.class)
	static class OAuth2WebSecurityConfigurerAdapter extends WebSecurityConfigurerAdapter {

		@Override
		protected void configure(HttpSecurity http) throws Exception {
			// Two sequential calls on the same builder accumulate the same
			// configuration as a single and()-joined chain.
			http.authorizeRequests().anyRequest().authenticated();
			http.oauth2().resourceServer().jwt();
		}

	}

}
| apache-2.0 |
ismael-sarmento-jr/hapi-fhir | hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/SearchBuilder.java | 78434 | package ca.uhn.fhir.jpa.dao;
import static org.apache.commons.lang3.StringUtils.defaultString;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
import java.math.BigDecimal;
import java.math.MathContext;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.UUID;
import javax.persistence.EntityManager;
import javax.persistence.Tuple;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Expression;
import javax.persistence.criteria.From;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Order;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.persistence.criteria.Subquery;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.context.RuntimeChildResourceDefinition;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.dao.data.IResourceIndexedSearchParamUriDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.entity.BaseHasResource;
import ca.uhn.fhir.jpa.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamNumber;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.entity.ResourceLink;
import ca.uhn.fhir.jpa.entity.ResourceTable;
import ca.uhn.fhir.jpa.entity.ResourceTag;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.entity.TagDefinition;
import ca.uhn.fhir.jpa.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.util.StopWatch;
import ca.uhn.fhir.model.api.IPrimitiveDatatype;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.base.composite.BaseCodingDt;
import ca.uhn.fhir.model.base.composite.BaseIdentifierDt;
import ca.uhn.fhir.model.base.composite.BaseQuantityDt;
import ca.uhn.fhir.model.dstu.resource.BaseResource;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.method.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.param.CompositeParam;
import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ParamPrefixEnum;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.UriParam;
import ca.uhn.fhir.rest.param.UriParamQualifierEnum;
import ca.uhn.fhir.rest.server.Constants;
import ca.uhn.fhir.rest.server.IBundleProvider;
import ca.uhn.fhir.rest.server.SimpleBundleProvider;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
public class SearchBuilder {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchBuilder.class);
// DAO that created this builder; used to translate forced IDs and to look up sibling resource DAOs.
private BaseHapiFhirDao<?> myCallingDao;
// FHIR context for resource definitions, terser access and localized messages.
private FhirContext myContext;
// JPA entity manager through which all criteria queries are created.
private EntityManager myEntityManager;
// The search parameters currently being processed (set elsewhere in this class).
private SearchParameterMap myParams;
// Running set of matching resource PIDs; each addPredicate* step narrows it.
private Collection<Long> myPids;
private PlatformTransactionManager myPlatformTransactionManager;
private IResourceIndexedSearchParamUriDao myResourceIndexedSearchParamUriDao;
// Name and class of the resource type being searched (e.g. "Patient").
private String myResourceName;
private Class<? extends IBaseResource> myResourceType;
// Optional fulltext search service -- presumably null when fulltext indexing is disabled; confirm with wiring.
private IFulltextSearchSvc mySearchDao;
// Persistent Search entity backing paged result retrieval.
private Search mySearchEntity;
private ISearchResultDao mySearchResultDao;
/**
 * Wires the collaborators this builder needs to translate a search-parameter
 * map into JPA criteria queries. All assignments are independent of one
 * another; they are simply stored for later use.
 */
public SearchBuilder(FhirContext theFhirContext, EntityManager theEntityManager, PlatformTransactionManager thePlatformTransactionManager, IFulltextSearchSvc theSearchDao,
		ISearchResultDao theSearchResultDao, BaseHapiFhirDao<?> theDao, IResourceIndexedSearchParamUriDao theResourceIndexedSearchParamUriDao) {
	this.myCallingDao = theDao;
	this.myContext = theFhirContext;
	this.myEntityManager = theEntityManager;
	this.myPlatformTransactionManager = thePlatformTransactionManager;
	this.myResourceIndexedSearchParamUriDao = theResourceIndexedSearchParamUriDao;
	this.mySearchDao = theSearchDao;
	this.mySearchResultDao = theSearchResultDao;
}
/**
 * Narrows the result set by a composite search parameter: both halves of the
 * composite value must match on the same resource. Replaces the current PID
 * set with the query result.
 */
private void addPredicateComposite(RuntimeSearchParam theParamDef, List<? extends IQueryParameterType> theNextAnd) {
	// TODO: fail if missing is set for a composite query
	CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> query = cb.createQuery(Long.class);
	Root<ResourceTable> root = query.from(ResourceTable.class);
	query.select(root.get("myId").as(Long.class));

	IQueryParameterType firstOr = theNextAnd.get(0);
	if (!(firstOr instanceof CompositeParam<?, ?>)) {
		throw new InvalidRequestException("Invalid type for composite param (must be " + CompositeParam.class.getSimpleName() + ": " + firstOr.getClass());
	}
	CompositeParam<?, ?> composite = (CompositeParam<?, ?>) firstOr;

	List<Predicate> predicates = new ArrayList<Predicate>();
	predicates.add(cb.equal(root.get("myResourceType"), myResourceName));

	// One sub-predicate per half of the composite value.
	predicates.add(createCompositeParamPart(cb, root, theParamDef.getCompositeOf().get(0), composite.getLeftValue()));
	predicates.add(createCompositeParamPart(cb, root, theParamDef.getCompositeOf().get(1), composite.getRightValue()));

	// Intersect with the PIDs matched by earlier predicates.
	createPredicateResourceId(cb, query, predicates, root.get("myId").as(Long.class));

	query.where(cb.and(toArray(predicates)));
	doSetPids(myEntityManager.createQuery(query).getResultList());
}
/**
 * Narrows the result set by a date search parameter. The values in
 * {@code theList} are OR'd together; the combined clause is AND'd with
 * resource-type, parameter-name, resource-id and lastUpdated restrictions.
 * The {@code :missing=true} modifier is delegated to a dedicated query.
 */
private void addPredicateDate(String theParamName, List<? extends IQueryParameterType> theList) {
	// :missing=true is handled by a NOT-IN style query against the param table
	if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
		addPredicateParamMissing("myParamsDate", theParamName, ResourceIndexedSearchParamDate.class);
		return;
	}
	CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> cq = builder.createQuery(Long.class);
	Root<ResourceIndexedSearchParamDate> from = cq.from(ResourceIndexedSearchParamDate.class);
	cq.select(from.get("myResourcePid").as(Long.class));

	// OR together one predicate per value in theList
	List<Predicate> codePredicates = new ArrayList<Predicate>();
	for (IQueryParameterType nextOr : theList) {
		// :missing=false contributes its own "any value present" predicate
		if (addPredicateMissingFalseIfPresent(builder, theParamName, from, codePredicates, nextOr)) {
			continue;
		}
		IQueryParameterType params = nextOr;
		Predicate p = createPredicateDate(builder, from, params);
		codePredicates.add(p);
	}
	Predicate masterCodePredicate = builder.or(toArray(codePredicates));

	List<Predicate> predicates = new ArrayList<Predicate>();
	predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
	predicates.add(builder.equal(from.get("myParamName"), theParamName));
	// Intersect with PIDs matched so far, and apply any _lastUpdated bounds
	createPredicateResourceId(builder, cq, predicates, from.get("myResourcePid").as(Long.class));
	createPredicateLastUpdatedForIndexedSearchParam(builder, from, predicates);
	predicates.add(masterCodePredicate);

	cq.where(builder.and(toArray(predicates)));

	TypedQuery<Long> q = myEntityManager.createQuery(cq);
	doSetPids(q.getResultList());
}
/**
 * Narrows the result set to the given resource PIDs (the _id parameter),
 * also applying resource-type, prior-result and lastUpdated restrictions.
 * No-op when no PIDs were supplied.
 */
private void addPredicateId(Set<Long> thePids, DateRangeParam theLastUpdated) {
	if (thePids == null || thePids.isEmpty()) {
		return;
	}

	CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> query = cb.createQuery(Long.class);
	Root<ResourceTable> root = query.from(ResourceTable.class);
	query.select(root.get("myId").as(Long.class));

	List<Predicate> clauses = new ArrayList<Predicate>();
	clauses.add(cb.equal(root.get("myResourceType"), myResourceName));
	clauses.add(root.get("myId").in(thePids));
	createPredicateResourceId(cb, query, clauses, root.get("myId").as(Long.class));
	createPredicateLastUpdatedForResourceTable(cb, root, clauses);

	query.where(toArray(clauses));
	doSetPids(myEntityManager.createQuery(query).getResultList());
}
/**
 * Narrows the result set by the {@code _language} parameter. Each element of
 * {@code theList} is an AND clause whose values are OR'd together. Stops early
 * when the running PID set becomes empty.
 *
 * @throws InternalErrorException if a value is not a {@link StringParam}
 */
private void addPredicateLanguage(List<List<? extends IQueryParameterType>> theList, DateRangeParam theLastUpdated) {
	for (List<? extends IQueryParameterType> nextList : theList) {

		CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
		CriteriaQuery<Long> cq = builder.createQuery(Long.class);
		Root<ResourceTable> from = cq.from(ResourceTable.class);
		cq.select(from.get("myId").as(Long.class));

		// Collect the non-blank language codes for this AND clause
		Set<String> values = new HashSet<String>();
		for (IQueryParameterType next : nextList) {
			if (next instanceof StringParam) {
				String nextValue = ((StringParam) next).getValue();
				if (isBlank(nextValue)) {
					continue;
				}
				values.add(nextValue);
			} else {
				// FIX: corrected typo "Lanugage" -> "Language" in the error message
				throw new InternalErrorException("Language parameter must be of type " + StringParam.class.getCanonicalName() + " - Got " + next.getClass().getCanonicalName());
			}
		}

		if (values.isEmpty()) {
			continue;
		}

		List<Predicate> predicates = new ArrayList<Predicate>();
		predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
		predicates.add(from.get("myLanguage").as(String.class).in(values));
		createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class));
		createPredicateLastUpdatedForResourceTable(builder, from, predicates);
		// Deleted resources are never returned
		predicates.add(builder.isNull(from.get("myDeleted")));

		cq.where(toArray(predicates));

		TypedQuery<Long> q = myEntityManager.createQuery(cq);
		doSetPids(q.getResultList());
		if (doHaveNoResults()) {
			// Intersection already empty -- no later clause can add results back
			return;
		}
	}

	return;
}
/**
 * If the given value carries a {@code :missing} qualifier, handles it here:
 * {@code :missing=false} adds a predicate matching any indexed value for
 * {@code theParamName}; {@code :missing=true} mixed with other values of the
 * same name is rejected.
 *
 * @return true if the value carried a :missing qualifier and was consumed,
 *         so the caller should skip normal value matching for it
 * @throws InvalidRequestException on :missing=true combined with other values
 */
private boolean addPredicateMissingFalseIfPresent(CriteriaBuilder theBuilder, String theParamName, Root<? extends BaseResourceIndexedSearchParam> from, List<Predicate> codePredicates,
		IQueryParameterType nextOr) {
	boolean missingFalse = false;
	if (nextOr.getMissing() != null) {
		// booleanValue() is already a boolean; comparing it to true was redundant
		if (nextOr.getMissing().booleanValue()) {
			throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "multipleParamsWithSameNameOneIsMissingTrue", theParamName));
		}
		// :missing=false matches any row that has an indexed value for this param
		Predicate singleCode = from.get("myId").isNotNull();
		Predicate name = theBuilder.equal(from.get("myParamName"), theParamName);
		codePredicates.add(theBuilder.and(name, singleCode));
		missingFalse = true;
	}
	return missingFalse;
}
/**
 * Reference-parameter analogue of addPredicateMissingFalseIfPresent: for
 * {@code :missing=false}, adds a predicate matching any {@link ResourceLink}
 * on this parameter's path; {@code :missing=true} mixed with other values of
 * the same name is rejected.
 *
 * @return true if the value carried a :missing qualifier and was consumed,
 *         so the caller should skip normal value matching for it
 * @throws InvalidRequestException on :missing=true combined with other values
 */
private boolean addPredicateMissingFalseIfPresentForResourceLink(CriteriaBuilder theBuilder, String theParamName, Root<? extends ResourceLink> from, List<Predicate> codePredicates,
		IQueryParameterType nextOr) {
	boolean missingFalse = false;
	if (nextOr.getMissing() != null) {
		// booleanValue() is already a boolean; comparing it to true was redundant
		if (nextOr.getMissing().booleanValue()) {
			throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "multipleParamsWithSameNameOneIsMissingTrue", theParamName));
		}
		// :missing=false matches any link present on this parameter's path
		Predicate singleCode = from.get("mySourceResource").isNotNull();
		Predicate name = createResourceLinkPathPredicate(theParamName, theBuilder, from);
		codePredicates.add(theBuilder.and(name, singleCode));
		missingFalse = true;
	}
	return missingFalse;
}
/**
 * Narrows the result set by a number search parameter. Values are OR'd
 * together and combined with resource-type, parameter-name, resource-id and
 * lastUpdated restrictions. {@code :missing=true} is delegated to a dedicated
 * query.
 *
 * @throws IllegalArgumentException if a value is not a {@link NumberParam}
 */
private void addPredicateNumber(String theParamName, List<? extends IQueryParameterType> theList) {
	if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
		addPredicateParamMissing("myParamsNumber", theParamName, ResourceIndexedSearchParamNumber.class);
		return;
	}
	CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> cq = builder.createQuery(Long.class);
	Root<ResourceIndexedSearchParamNumber> from = cq.from(ResourceIndexedSearchParamNumber.class);
	cq.select(from.get("myResourcePid").as(Long.class));

	List<Predicate> codePredicates = new ArrayList<Predicate>();
	for (IQueryParameterType nextOr : theList) {
		IQueryParameterType params = nextOr;
		// :missing=false contributes its own "any value present" predicate
		if (addPredicateMissingFalseIfPresent(builder, theParamName, from, codePredicates, nextOr)) {
			continue;
		}
		if (params instanceof NumberParam) {
			NumberParam param = (NumberParam) params;

			// Values without a number (e.g. empty) are skipped entirely
			BigDecimal value = param.getValue();
			if (value == null) {
				continue;
			}

			final Expression<BigDecimal> fromObj = from.get("myValue");
			// No explicit prefix means equality
			ParamPrefixEnum prefix = ObjectUtils.defaultIfNull(param.getPrefix(), ParamPrefixEnum.EQUAL);
			String invalidMessageName = "invalidNumberPrefix";
			String valueAsString = param.getValue().toPlainString();

			Predicate num = createPredicateNumeric(builder, params, prefix, value, fromObj, invalidMessageName, valueAsString);
			codePredicates.add(num);
		} else {
			throw new IllegalArgumentException("Invalid token type: " + params.getClass());
		}
	}

	List<Predicate> predicates = new ArrayList<Predicate>();
	predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
	predicates.add(builder.equal(from.get("myParamName"), theParamName));
	predicates.add(builder.or(toArray(codePredicates)));
	// Intersect with PIDs matched so far, and apply any _lastUpdated bounds
	createPredicateResourceId(builder, cq, predicates, from.get("myResourcePid").as(Long.class));
	createPredicateLastUpdatedForIndexedSearchParam(builder, from, predicates);

	cq.where(builder.and(toArray(predicates)));

	TypedQuery<Long> q = myEntityManager.createQuery(cq);
	doSetPids(q.getResultList());
}
/**
 * Implements the {@code :missing=true} modifier for an indexed search
 * parameter: selects resources of the current type that have NO row for
 * {@code theParamName} in {@code theParamTable} (NOT-IN subquery), excluding
 * deleted resources, and replaces the running PID set with the result.
 *
 * @param joinName not read by the current implementation; retained for
 *                 signature compatibility with callers
 */
private void addPredicateParamMissing(String joinName, String theParamName, Class<? extends BaseResourceIndexedSearchParam> theParamTable) {
	CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> cq = builder.createQuery(Long.class);
	Root<ResourceTable> from = cq.from(ResourceTable.class);
	cq.select(from.get("myId").as(Long.class));

	// Subquery: PIDs that DO have an indexed value for this parameter
	Subquery<Long> subQ = cq.subquery(Long.class);
	Root<? extends BaseResourceIndexedSearchParam> subQfrom = subQ.from(theParamTable);
	subQ.select(subQfrom.get("myResourcePid").as(Long.class));
	Predicate subQname = builder.equal(subQfrom.get("myParamName"), theParamName);
	Predicate subQtype = builder.equal(subQfrom.get("myResourceType"), myResourceName);
	subQ.where(builder.and(subQtype, subQname));

	List<Predicate> predicates = new ArrayList<Predicate>();
	// Keep only resources NOT present in the subquery
	predicates.add(builder.not(builder.in(from.get("myId")).value(subQ)));
	predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
	predicates.add(builder.isNull(from.get("myDeleted")));
	createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class));

	cq.where(builder.and(toArray(predicates)));

	ourLog.info("Adding :missing qualifier for parameter '{}'", theParamName);
	TypedQuery<Long> q = myEntityManager.createQuery(cq);
	doSetPids(q.getResultList());
}
/**
 * Implements the {@code :missing=true} modifier for a reference parameter:
 * selects resources of the current type that have NO {@link ResourceLink} on
 * the parameter's path (NOT-IN subquery) and replaces the running PID set
 * with the result.
 *
 * @param joinName not read by the current implementation; retained for
 *                 signature compatibility with callers
 */
private void addPredicateParamMissingResourceLink(String joinName, String theParamName) {
	CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> cq = builder.createQuery(Long.class);
	Root<ResourceTable> from = cq.from(ResourceTable.class);
	cq.select(from.get("myId").as(Long.class));

	// Subquery: source PIDs that DO have a link on this parameter's path
	Subquery<Long> subQ = cq.subquery(Long.class);
	Root<ResourceLink> subQfrom = subQ.from(ResourceLink.class);
	subQ.select(subQfrom.get("mySourceResourcePid").as(Long.class));

	// subQ.where(builder.equal(subQfrom.get("myParamName"), theParamName));

	Predicate path = createResourceLinkPathPredicate(theParamName, builder, subQfrom);
	subQ.where(path);

	List<Predicate> predicates = new ArrayList<Predicate>();
	createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class));
	// Keep only resources NOT present in the subquery
	predicates.add(builder.not(builder.in(from.get("myId")).value(subQ)));
	predicates.add(builder.equal(from.get("myResourceType"), myResourceName));

	cq.where(builder.and(toArray(predicates)));

	TypedQuery<Long> q = myEntityManager.createQuery(cq);
	List<Long> resultList = q.getResultList();
	doSetPids(new HashSet<Long>(resultList));
}
/**
 * Narrows the result set by a quantity search parameter. For each OR'd value
 * the numeric comparison is combined with optional system and unit matches;
 * the OR block is then AND'd with resource-type, parameter-name, resource-id
 * and lastUpdated restrictions. {@code :missing=true} is delegated to a
 * dedicated query.
 *
 * @throws IllegalArgumentException if a value is neither a BaseQuantityDt nor
 *                                  a QuantityParam
 */
private void addPredicateQuantity(String theParamName, List<? extends IQueryParameterType> theList) {
	if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
		addPredicateParamMissing("myParamsQuantity", theParamName, ResourceIndexedSearchParamQuantity.class);
		return;
	}
	CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> cq = builder.createQuery(Long.class);
	Root<ResourceIndexedSearchParamQuantity> from = cq.from(ResourceIndexedSearchParamQuantity.class);
	cq.select(from.get("myResourcePid").as(Long.class));

	List<Predicate> codePredicates = new ArrayList<Predicate>();
	for (IQueryParameterType nextOr : theList) {
		IQueryParameterType params = nextOr;
		// :missing=false contributes its own "any value present" predicate
		if (addPredicateMissingFalseIfPresent(builder, theParamName, from, codePredicates, nextOr)) {
			continue;
		}

		// Normalize both supported quantity representations into the same fields
		String systemValue;
		String unitsValue;
		ParamPrefixEnum cmpValue;
		BigDecimal valueValue;
		String valueString;

		if (params instanceof BaseQuantityDt) {
			BaseQuantityDt param = (BaseQuantityDt) params;
			systemValue = param.getSystemElement().getValueAsString();
			unitsValue = param.getUnitsElement().getValueAsString();
			cmpValue = ParamPrefixEnum.forDstu1Value(param.getComparatorElement().getValueAsString());
			valueValue = param.getValueElement().getValue();
			valueString = param.getValueElement().getValueAsString();
		} else if (params instanceof QuantityParam) {
			QuantityParam param = (QuantityParam) params;
			systemValue = param.getSystem();
			unitsValue = param.getUnits();
			cmpValue = param.getPrefix();
			valueValue = param.getValue();
			valueString = param.getValueAsString();
		} else {
			throw new IllegalArgumentException("Invalid quantity type: " + params.getClass());
		}

		// System and unit are each optional; only constrain on what was supplied
		Predicate system = null;
		if (!isBlank(systemValue)) {
			system = builder.equal(from.get("mySystem"), systemValue);
		}

		Predicate code = null;
		if (!isBlank(unitsValue)) {
			code = builder.equal(from.get("myUnits"), unitsValue);
		}

		// No explicit comparator means equality
		cmpValue = ObjectUtils.defaultIfNull(cmpValue, ParamPrefixEnum.EQUAL);
		final Expression<BigDecimal> path = from.get("myValue");
		String invalidMessageName = "invalidQuantityPrefix";

		Predicate num = createPredicateNumeric(builder, params, cmpValue, valueValue, path, invalidMessageName, valueString);

		// AND the numeric match with whichever of system/unit were supplied
		if (system == null && code == null) {
			codePredicates.add(num);
		} else if (system == null) {
			Predicate singleCode = builder.and(code, num);
			codePredicates.add(singleCode);
		} else if (code == null) {
			Predicate singleCode = builder.and(system, num);
			codePredicates.add(singleCode);
		} else {
			Predicate singleCode = builder.and(system, code, num);
			codePredicates.add(singleCode);
		}
	}

	List<Predicate> predicates = new ArrayList<Predicate>();
	predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
	predicates.add(builder.equal(from.get("myParamName"), theParamName));
	predicates.add(builder.or(toArray(codePredicates)));
	// Intersect with PIDs matched so far, and apply any _lastUpdated bounds
	createPredicateResourceId(builder, cq, predicates, from.get("myResourcePid").as(Long.class));
	createPredicateLastUpdatedForIndexedSearchParam(builder, from, predicates);

	cq.where(builder.and(toArray(predicates)));

	TypedQuery<Long> q = myEntityManager.createQuery(cq);
	doSetPids(new HashSet<Long>(q.getResultList()));
}
private void addPredicateReference(String theParamName, List<? extends IQueryParameterType> theList) {
assert theParamName.contains(".") == false;
if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
addPredicateParamMissingResourceLink("myResourceLinks", theParamName);
return;
}
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceLink> from = cq.from(ResourceLink.class);
cq.select(from.get("mySourceResourcePid").as(Long.class));
List<Predicate> codePredicates = new ArrayList<Predicate>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
if (addPredicateMissingFalseIfPresentForResourceLink(builder, theParamName, from, codePredicates, nextOr)) {
continue;
}
if (params instanceof ReferenceParam) {
ReferenceParam ref = (ReferenceParam) params;
if (isBlank(ref.getChain())) {
String resourceId = ref.getValueAsQueryToken(myContext);
if (resourceId.contains("/")) {
IIdType dt = new IdDt(resourceId);
resourceId = dt.getIdPart();
}
Long targetPid = myCallingDao.translateForcedIdToPid(new IdDt(resourceId));
ourLog.debug("Searching for resource link with target PID: {}", targetPid);
Predicate eq = builder.equal(from.get("myTargetResourcePid"), targetPid);
codePredicates.add(eq);
} else {
String paramPath = myContext.getResourceDefinition(myResourceType).getSearchParam(theParamName).getPath();
BaseRuntimeChildDefinition def = myContext.newTerser().getDefinition(myResourceType, paramPath);
if (!(def instanceof RuntimeChildResourceDefinition)) {
throw new ConfigurationException("Property " + paramPath + " of type " + myResourceName + " is not a resource: " + def.getClass());
}
List<Class<? extends IBaseResource>> resourceTypes;
String resourceId;
if (!ref.getValue().matches("[a-zA-Z]+\\/.*")) {
RuntimeChildResourceDefinition resDef = (RuntimeChildResourceDefinition) def;
resourceTypes = resDef.getResourceTypes();
resourceId = ref.getValue();
} else {
resourceTypes = new ArrayList<Class<? extends IBaseResource>>();
RuntimeResourceDefinition resDef = myContext.getResourceDefinition(ref.getResourceType());
resourceTypes.add(resDef.getImplementingClass());
resourceId = ref.getIdPart();
}
boolean foundChainMatch = false;
String chain = ref.getChain();
String remainingChain = null;
int chainDotIndex = chain.indexOf('.');
if (chainDotIndex != -1) {
remainingChain = chain.substring(chainDotIndex + 1);
chain = chain.substring(0, chainDotIndex);
}
for (Class<? extends IBaseResource> nextType : resourceTypes) {
RuntimeResourceDefinition typeDef = myContext.getResourceDefinition(nextType);
IFhirResourceDao<?> dao = myCallingDao.getDao(nextType);
if (dao == null) {
ourLog.debug("Don't have a DAO for type {}", nextType.getSimpleName());
continue;
}
int qualifierIndex = chain.indexOf(':');
String qualifier = null;
if (qualifierIndex != -1) {
qualifier = chain.substring(qualifierIndex);
chain = chain.substring(0, qualifierIndex);
}
boolean isMeta = BaseHapiFhirDao.RESOURCE_META_PARAMS.containsKey(chain);
RuntimeSearchParam param = null;
if (!isMeta) {
param = typeDef.getSearchParam(chain);
if (param == null) {
ourLog.debug("Type {} doesn't have search param {}", nextType.getSimpleName(), param);
continue;
}
}
IQueryParameterType chainValue;
if (remainingChain != null) {
if (param == null || param.getParamType() != RestSearchParameterTypeEnum.REFERENCE) {
ourLog.debug("Type {} parameter {} is not a reference, can not chain {}", new Object[] { nextType.getSimpleName(), chain, remainingChain });
continue;
}
chainValue = new ReferenceParam();
chainValue.setValueAsQueryToken(qualifier, resourceId);
((ReferenceParam) chainValue).setChain(remainingChain);
} else if (isMeta) {
IQueryParameterType type = BaseHapiFhirDao.newInstanceType(chain);
type.setValueAsQueryToken(qualifier, resourceId);
chainValue = type;
} else {
chainValue = toParameterType(param, qualifier, resourceId);
}
foundChainMatch = true;
Set<Long> pids = dao.searchForIds(chain, chainValue);
if (pids.isEmpty()) {
continue;
}
Predicate eq = from.get("myTargetResourcePid").in(pids);
codePredicates.add(eq);
}
if (!foundChainMatch) {
throw new InvalidRequestException(myContext.getLocalizer().getMessage(BaseHapiFhirResourceDao.class, "invalidParameterChain", theParamName + '.' + ref.getChain()));
}
}
} else {
throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + params.getClass());
}
}
List<Predicate> predicates = new ArrayList<Predicate>();
predicates.add(createResourceLinkPathPredicate(theParamName, builder, from));
predicates.add(builder.or(toArray(codePredicates)));
createPredicateResourceId(builder, cq, predicates, from.get("mySourceResourcePid").as(Long.class));
createPredicateLastUpdatedForResourceLink(builder, from, predicates);
cq.where(builder.and(toArray(predicates)));
TypedQuery<Long> q = myEntityManager.createQuery(cq);
doSetPids(new HashSet<Long>(q.getResultList()));
}
/**
 * Restricts the current PID set to resources whose indexed string values match any of the
 * given OR'd values for the named search parameter.
 *
 * @param theParamName the search parameter name
 * @param theList      the OR'd values for this parameter (never empty)
 */
private void addPredicateString(String theParamName, List<? extends IQueryParameterType> theList) {
    // :missing=true is handled by the dedicated "no index row exists" query
    if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
        addPredicateParamMissing("myParamsString", theParamName, ResourceIndexedSearchParamString.class);
        return;
    }
    CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
    CriteriaQuery<Long> query = cb.createQuery(Long.class);
    Root<ResourceIndexedSearchParamString> root = query.from(ResourceIndexedSearchParamString.class);
    query.select(root.get("myResourcePid").as(Long.class));

    // One predicate per OR'd value
    List<Predicate> valuePredicates = new ArrayList<Predicate>();
    for (IQueryParameterType nextOr : theList) {
        if (addPredicateMissingFalseIfPresent(cb, theParamName, root, valuePredicates, nextOr)) {
            continue;
        }
        valuePredicates.add(createPredicateString(nextOr, theParamName, cb, root));
    }

    // AND together: resource type, param name, (value OR ...), prior PID restriction, _lastUpdated
    List<Predicate> andPredicates = new ArrayList<Predicate>();
    andPredicates.add(cb.equal(root.get("myResourceType"), myResourceName));
    andPredicates.add(cb.equal(root.get("myParamName"), theParamName));
    andPredicates.add(cb.or(toArray(valuePredicates)));
    createPredicateResourceId(cb, query, andPredicates, root.get("myResourcePid").as(Long.class));
    createPredicateLastUpdatedForIndexedSearchParam(cb, root, andPredicates);
    query.where(cb.and(toArray(andPredicates)));

    TypedQuery<Long> typedQuery = myEntityManager.createQuery(query);
    doSetPids(new HashSet<Long>(typedQuery.getResultList()));
}
/**
 * Adds predicates for the _tag, _profile and _security search parameters. The outer list is
 * AND'd together; each inner list is OR'd. Every AND-group that contains at least one usable
 * value runs its own query against ResourceTag, and the resulting PIDs replace the current
 * PID set via doSetPids (intersection happens through createPredicateResourceId).
 *
 * @param theParamName    one of _tag, _profile or _security; anything else throws
 * @param theLastUpdated  optional _lastUpdated range applied via the joined resource row
 */
private void addPredicateTag(List<List<? extends IQueryParameterType>> theList, String theParamName, DateRangeParam theLastUpdated) {
// Map the meta parameter name onto the tag type stored in the tag definition table
TagTypeEnum tagType;
if (Constants.PARAM_TAG.equals(theParamName)) {
tagType = TagTypeEnum.TAG;
} else if (Constants.PARAM_PROFILE.equals(theParamName)) {
tagType = TagTypeEnum.PROFILE;
} else if (Constants.PARAM_SECURITY.equals(theParamName)) {
tagType = TagTypeEnum.SECURITY_LABEL;
} else {
throw new IllegalArgumentException("Param name: " + theParamName); // shouldn't happen
}
for (List<? extends IQueryParameterType> nextAndParams : theList) {
// First pass: validate and decide whether this AND-group has any usable value at all
boolean haveTags = false;
for (IQueryParameterType nextParamUncasted : nextAndParams) {
if (nextParamUncasted instanceof TokenParam) {
TokenParam nextParam = (TokenParam) nextParamUncasted;
if (isNotBlank(nextParam.getValue())) {
haveTags = true;
} else if (isNotBlank(nextParam.getSystem())) {
// A system with no code is rejected rather than silently ignored
throw new InvalidRequestException("Invalid " + theParamName + " parameter (must supply a value/code and not just a system): " + nextParam.getValueAsQueryToken(myContext));
}
} else {
// _profile values arrive as UriParam
UriParam nextParam = (UriParam) nextParamUncasted;
if (isNotBlank(nextParam.getValue())) {
haveTags = true;
}
}
}
if (!haveTags) {
continue;
}
// Build one query per AND-group, selecting resource PIDs from the tag link table
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceTag> from = cq.from(ResourceTag.class);
cq.select(from.get("myResourceId").as(Long.class));
List<Predicate> andPredicates = new ArrayList<Predicate>();
andPredicates.add(builder.equal(from.get("myResourceType"), myResourceName));
// Second pass: one OR predicate per value in the group
List<Predicate> orPredicates = new ArrayList<Predicate>();
for (IQueryParameterType nextOrParams : nextAndParams) {
String code;
String system;
if (nextOrParams instanceof TokenParam) {
TokenParam nextParam = (TokenParam) nextOrParams;
code = nextParam.getValue();
system = nextParam.getSystem();
} else {
UriParam nextParam = (UriParam) nextOrParams;
code = nextParam.getValue();
system = null;
}
From<ResourceTag, TagDefinition> defJoin = from.join("myTag");
Predicate typePrediate = builder.equal(defJoin.get("myTagType"), tagType);
Predicate codePrediate = builder.equal(defJoin.get("myCode"), code);
// Blank codes were tolerated above (haveTags came from another value); skip them here
if (isBlank(code)) {
continue;
}
if (isNotBlank(system)) {
Predicate systemPrediate = builder.equal(defJoin.get("mySystem"), system);
orPredicates.add(builder.and(typePrediate, systemPrediate, codePrediate));
} else {
orPredicates.add(builder.and(typePrediate, codePrediate));
}
}
if (orPredicates.isEmpty() == false) {
andPredicates.add(builder.or(toArray(orPredicates)));
}
// Join to the resource row to exclude deleted resources and apply _lastUpdated
From<ResourceTag, ResourceTable> defJoin = from.join("myResource");
Predicate notDeletedPredicatePrediate = builder.isNull(defJoin.get("myDeleted"));
andPredicates.add(notDeletedPredicatePrediate);
if (theLastUpdated != null) {
andPredicates.addAll(createLastUpdatedPredicates(theLastUpdated, builder, defJoin));
}
// Intersect with PIDs from previously-applied parameters
createPredicateResourceId(builder, cq, andPredicates, from.get("myResourceId").as(Long.class));
Predicate masterCodePredicate = builder.and(toArray(andPredicates));
cq.where(masterCodePredicate);
TypedQuery<Long> q = myEntityManager.createQuery(cq);
Set<Long> pids = new HashSet<Long>(q.getResultList());
doSetPids(pids);
}
}
/**
 * Restricts the current PID set for a token search parameter (system|code values).
 * Values carrying the :text modifier are delegated to the string index instead.
 *
 * @param theParamName the search parameter name
 * @param theList      the OR'd values for this parameter (never empty)
 */
private void addPredicateToken(String theParamName, List<? extends IQueryParameterType> theList) {
// :missing=true - match resources that have no indexed token for this param
if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
addPredicateParamMissing("myParamsToken", theParamName, ResourceIndexedSearchParamToken.class);
return;
}
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceIndexedSearchParamToken> from = cq.from(ResourceIndexedSearchParamToken.class);
cq.select(from.get("myResourcePid").as(Long.class));
List<Predicate> codePredicates = new ArrayList<Predicate>();
for (IQueryParameterType nextOr : theList) {
if (addPredicateMissingFalseIfPresent(builder, theParamName, from, codePredicates, nextOr)) {
continue;
}
if (nextOr instanceof TokenParam) {
TokenParam id = (TokenParam) nextOr;
if (id.isText()) {
// :text modifier - search the normalized string index instead of the token index.
// NOTE(review): this passes the entire list, not just this value, and runs once
// per text-qualified value; confirm intended when text and non-text values mix.
addPredicateString(theParamName, theList);
continue;
}
}
Predicate singleCode = createPredicateToken(nextOr, theParamName, builder, from);
codePredicates.add(singleCode);
}
// Every value was handled elsewhere (e.g. :text) - keep whatever PID set is already active
if (codePredicates.isEmpty()) {
return;
}
List<Predicate> predicates = new ArrayList<Predicate>();
predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
predicates.add(builder.equal(from.get("myParamName"), theParamName));
predicates.add(builder.or(toArray(codePredicates)));
// Intersect with PIDs from previously-applied parameters
createPredicateResourceId(builder, cq, predicates, from.get("myResourcePid").as(Long.class));
cq.where(builder.and(toArray(predicates)));
TypedQuery<Long> q = myEntityManager.createQuery(cq);
doSetPids(new HashSet<Long>(q.getResultList()));
}
/**
 * Restricts the current PID set for a uri search parameter. Supports exact matching plus the
 * :above and :below qualifiers. If no usable predicate results, the PID set is cleared.
 *
 * @param theParamName the search parameter name
 * @param theList      the OR'd values for this parameter (never empty); each must be a UriParam
 */
private void addPredicateUri(String theParamName, List<? extends IQueryParameterType> theList) {
// :missing=true - match resources that have no indexed uri for this param
if (Boolean.TRUE.equals(theList.get(0).getMissing())) {
addPredicateParamMissing("myParamsUri", theParamName, ResourceIndexedSearchParamUri.class);
return;
}
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Long> cq = builder.createQuery(Long.class);
Root<ResourceIndexedSearchParamUri> from = cq.from(ResourceIndexedSearchParamUri.class);
cq.select(from.get("myResourcePid").as(Long.class));
List<Predicate> codePredicates = new ArrayList<Predicate>();
for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr;
if (addPredicateMissingFalseIfPresent(builder, theParamName, from, codePredicates, nextOr)) {
continue;
}
if (params instanceof UriParam) {
UriParam param = (UriParam) params;
String value = param.getValue();
if (value == null) {
continue;
}
Path<Object> fromObj = from.get("myUri");
Predicate predicate;
if (param.getQualifier() == UriParamQualifierEnum.ABOVE) {
/*
* :above is an inefficient query- It means that the user is supplying a more specific URL (say http://example.com/foo/bar/baz) and that we should match on any URLs that are less
* specific but otherwise the same. For example http://example.com and http://example.com/foo would both match.
*
* We do this by querying the DB for all candidate URIs and then manually checking each one. This isn't very efficient, but this is also probably not a very common type of query to do.
*
* If we ever need to make this more efficient, lucene could certainly be used as an optimization.
*/
ourLog.info("Searching for candidate URI:above parameters for Resource[{}] param[{}]", myResourceName, theParamName);
Collection<String> candidates = myResourceIndexedSearchParamUriDao.findAllByResourceTypeAndParamName(myResourceName, theParamName);
List<String> toFind = new ArrayList<String>();
// Keep each stored URI that is a prefix of the supplied value
for (String next : candidates) {
if (value.length() >= next.length()) {
if (value.substring(0, next.length()).equals(next)) {
toFind.add(next);
}
}
}
if (toFind.isEmpty()) {
continue;
}
predicate = fromObj.as(String.class).in(toFind);
} else if (param.getQualifier() == UriParamQualifierEnum.BELOW) {
// :below - stored URI starts with the supplied value (SQL LIKE 'value%')
predicate = builder.like(fromObj.as(String.class), createLeftMatchLikeExpression(value));
} else {
// No qualifier - exact match
predicate = builder.equal(fromObj.as(String.class), value);
}
codePredicates.add(predicate);
} else {
throw new IllegalArgumentException("Invalid URI type: " + params.getClass());
}
}
// Unlike addPredicateToken, an empty predicate list here yields an empty result set
if (codePredicates.isEmpty()) {
doSetPids(new HashSet<Long>());
return;
}
List<Predicate> predicates = new ArrayList<Predicate>();
predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
predicates.add(builder.equal(from.get("myParamName"), theParamName));
predicates.add(builder.or(toArray(codePredicates)));
// Intersect with PIDs from previously-applied parameters
createPredicateResourceId(builder, cq, predicates, from.get("myResourcePid").as(Long.class));
cq.where(builder.and(toArray(predicates)));
TypedQuery<Long> q = myEntityManager.createQuery(cq);
doSetPids(new HashSet<Long>(q.getResultList()));
}
/**
 * Builds the predicate for one component of a composite search parameter by joining from the
 * resource table to the index table that matches the component's parameter type.
 *
 * @throws InvalidRequestException if the component type is not STRING, TOKEN or DATE
 */
private Predicate createCompositeParamPart(CriteriaBuilder builder, Root<ResourceTable> from, RuntimeSearchParam left, IQueryParameterType leftValue) {
    Predicate result = null;
    switch (left.getParamType()) {
    case STRING: {
        From<ResourceIndexedSearchParamString, ResourceIndexedSearchParamString> join = from.join("myParamsString", JoinType.INNER);
        result = createPredicateString(leftValue, left.getName(), builder, join);
        break;
    }
    case TOKEN: {
        From<ResourceIndexedSearchParamToken, ResourceIndexedSearchParamToken> join = from.join("myParamsToken", JoinType.INNER);
        result = createPredicateToken(leftValue, left.getName(), builder, join);
        break;
    }
    case DATE: {
        From<ResourceIndexedSearchParamDate, ResourceIndexedSearchParamDate> join = from.join("myParamsDate", JoinType.INNER);
        result = createPredicateDate(builder, join, leftValue);
        break;
    }
    }
    if (result == null) {
        throw new InvalidRequestException("Don't know how to handle composite parameter with type of " + left.getParamType());
    }
    return result;
}
/**
 * Builds a date predicate from either a single DateParam (converted to a one-sided or
 * point range) or an explicit DateRangeParam.
 *
 * @return the predicate, or null for an empty DateParam
 * @throws IllegalArgumentException for any other parameter type
 */
private Predicate createPredicateDate(CriteriaBuilder theBuilder, From<ResourceIndexedSearchParamDate, ResourceIndexedSearchParamDate> theFrom, IQueryParameterType theParam) {
    if (theParam instanceof DateParam) {
        DateParam dateParam = (DateParam) theParam;
        if (dateParam.isEmpty()) {
            // TODO: handle missing date param?
            return null;
        }
        return createPredicateDateFromRange(theBuilder, theFrom, new DateRangeParam(dateParam));
    }
    if (theParam instanceof DateRangeParam) {
        return createPredicateDateFromRange(theBuilder, theFrom, (DateRangeParam) theParam);
    }
    throw new IllegalArgumentException("Invalid token type: " + theParam.getClass());
}
/**
 * Translates a date range into predicates over the indexed period columns myValueLow and
 * myValueHigh. By default a bound matches if either end of the stored period satisfies it;
 * the sa (starts-after) and eb (ends-before) prefixes tighten this to a single column.
 */
private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder, From<ResourceIndexedSearchParamDate, ResourceIndexedSearchParamDate> theFrom, DateRangeParam theRange) {
    Date lowerBound = theRange.getLowerBoundAsInstant();
    Date upperBound = theRange.getUpperBoundAsInstant();

    Predicate lowerPredicate = null;
    if (lowerBound != null) {
        Predicate lowGe = theBuilder.greaterThanOrEqualTo(theFrom.<Date> get("myValueLow"), lowerBound);
        Predicate highGe = theBuilder.greaterThanOrEqualTo(theFrom.<Date> get("myValueHigh"), lowerBound);
        if (theRange.getLowerBound().getPrefix() == ParamPrefixEnum.STARTS_AFTER) {
            // sa - only the period start may satisfy the bound
            lowerPredicate = lowGe;
        } else {
            lowerPredicate = theBuilder.or(lowGe, highGe);
        }
    }

    Predicate upperPredicate = null;
    if (upperBound != null) {
        Predicate lowLe = theBuilder.lessThanOrEqualTo(theFrom.<Date> get("myValueLow"), upperBound);
        Predicate highLe = theBuilder.lessThanOrEqualTo(theFrom.<Date> get("myValueHigh"), upperBound);
        if (theRange.getUpperBound().getPrefix() == ParamPrefixEnum.ENDS_BEFORE) {
            // eb - only the period end may satisfy the bound
            upperPredicate = highLe;
        } else {
            upperPredicate = theBuilder.or(lowLe, highLe);
        }
    }

    if (lowerPredicate != null && upperPredicate != null) {
        return theBuilder.and(lowerPredicate, upperPredicate);
    }
    return lowerPredicate != null ? lowerPredicate : upperPredicate;
}
/**
 * If the active search carries a _lastUpdated range, joins from the index row to its owning
 * resource and appends the corresponding date predicates. Consumes (removes) the range from
 * myParams so it is applied only once.
 */
private void createPredicateLastUpdatedForIndexedSearchParam(CriteriaBuilder builder, Root<? extends BaseResourceIndexedSearchParam> from, List<Predicate> predicates) {
    DateRangeParam lastUpdated = myParams.getLastUpdatedAndRemove();
    if (lastUpdated == null) {
        return;
    }
    From<BaseResourceIndexedSearchParam, ResourceTable> resourceJoin = from.join("myResource", JoinType.INNER);
    predicates.addAll(createLastUpdatedPredicates(lastUpdated, builder, resourceJoin));
}
/**
 * If the active search carries a _lastUpdated range, joins from the resource link to its
 * source resource and appends the corresponding date predicates. Consumes (removes) the
 * range from myParams so it is applied only once.
 */
private void createPredicateLastUpdatedForResourceLink(CriteriaBuilder builder, Root<ResourceLink> from, List<Predicate> predicates) {
    DateRangeParam lastUpdated = myParams.getLastUpdatedAndRemove();
    if (lastUpdated != null) {
        // Declare the join with its actual source type: the root here is ResourceLink (the
        // previous declaration said BaseResourceIndexedSearchParam, which was misleading -
        // it only compiled because the string-based join() leaves both type params free)
        From<ResourceLink, ResourceTable> defJoin = from.join("mySourceResource", JoinType.INNER);
        List<Predicate> lastUpdatedPredicates = createLastUpdatedPredicates(lastUpdated, builder, defJoin);
        predicates.addAll(lastUpdatedPredicates);
    }
}
/**
 * Appends _lastUpdated predicates directly against the resource table root. Consumes
 * (removes) the range from myParams so it is applied only once.
 */
private void createPredicateLastUpdatedForResourceTable(CriteriaBuilder builder, Root<ResourceTable> from, List<Predicate> predicates) {
    DateRangeParam lastUpdated = myParams.getLastUpdatedAndRemove();
    predicates.addAll(createLastUpdatedPredicates(lastUpdated, builder, from));
}
/**
 * Builds a numeric comparison predicate for the given prefix (gt/ge/lt/le/eq/ne/ap).
 * Equality-style prefixes match a fuzz range around the value rather than an exact number,
 * honoring the significant figures supplied by the client; ne inverts that range.
 *
 * @param invalidMessageName localizer key used when the prefix is unsupported
 * @param theValueString     raw client-supplied value (currently unused here)
 * @throws InvalidRequestException for prefixes this server cannot serve
 */
private Predicate createPredicateNumeric(CriteriaBuilder builder, IQueryParameterType params, ParamPrefixEnum cmpValue, BigDecimal valueValue, final Expression<BigDecimal> path,
String invalidMessageName, String theValueString) {
Predicate num;
switch (cmpValue) {
case GREATERTHAN:
num = builder.gt(path, valueValue);
break;
case GREATERTHAN_OR_EQUALS:
num = builder.ge(path, valueValue);
break;
case LESSTHAN:
num = builder.lt(path, valueValue);
break;
case LESSTHAN_OR_EQUALS:
num = builder.le(path, valueValue);
break;
case APPROXIMATE:
case EQUAL:
case NOT_EQUAL:
// Match a tolerance band [value - fuzz, value + fuzz] instead of an exact number
BigDecimal mul = calculateFuzzAmount(cmpValue, valueValue);
BigDecimal low = valueValue.subtract(mul, MathContext.DECIMAL64);
BigDecimal high = valueValue.add(mul, MathContext.DECIMAL64);
Predicate lowPred;
Predicate highPred;
if (cmpValue != ParamPrefixEnum.NOT_EQUAL) {
lowPred = builder.ge(path.as(BigDecimal.class), low);
highPred = builder.le(path.as(BigDecimal.class), high);
num = builder.and(lowPred, highPred);
ourLog.trace("Searching for {} <= val <= {}", low, high);
} else {
// Prefix was "ne", so reverse it!
lowPred = builder.lt(path.as(BigDecimal.class), low);
highPred = builder.gt(path.as(BigDecimal.class), high);
num = builder.or(lowPred, highPred);
}
break;
default:
String msg = myContext.getLocalizer().getMessage(SearchBuilder.class, invalidMessageName, cmpValue.getValue(), params.getValueAsQueryToken(myContext));
throw new InvalidRequestException(msg);
}
return num;
}
/**
 * Restricts the query to the PIDs matched by previously-applied parameters. In persisted
 * mode this is expressed as an IN-subquery against the stored search results; in memory
 * mode it is an IN over the current PID collection. No-op if nothing has matched yet.
 */
private void createPredicateResourceId(CriteriaBuilder builder, CriteriaQuery<?> cq, List<Predicate> thePredicates, Expression<Long> theExpression) {
    if (!myParams.isPersistResults()) {
        if (myPids != null) {
            thePredicates.add(theExpression.in(myPids));
        }
        return;
    }
    // A total count of -1 means no result set has been persisted yet
    if (mySearchEntity.getTotalCount() > -1) {
        Subquery<Long> subQuery = cq.subquery(Long.class);
        Root<SearchResult> subRoot = subQuery.from(SearchResult.class);
        subQuery.select(subRoot.get("myResourcePid").as(Long.class));
        subQuery.where(builder.equal(subRoot.get("mySearch"), mySearchEntity));
        thePredicates.add(theExpression.in(subQuery));
    }
}
/**
 * Builds the predicate for a single string search value: a normalized left-match (LIKE
 * 'value%') by default, additionally AND'd with an exact comparison when :exact is used.
 *
 * @throws IllegalStateException   if given a token value without the :text modifier
 * @throws IllegalArgumentException for unsupported parameter types
 * @throws InvalidRequestException  if the value exceeds the indexable length
 */
private Predicate createPredicateString(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder,
        From<ResourceIndexedSearchParamString, ResourceIndexedSearchParamString> theFrom) {
    String rawSearchTerm;
    if (theParameter instanceof TokenParam) {
        TokenParam tokenParam = (TokenParam) theParameter;
        if (!tokenParam.isText()) {
            throw new IllegalStateException("Trying to process a text search on a non-text token parameter");
        }
        rawSearchTerm = tokenParam.getValue();
    } else if (theParameter instanceof StringParam) {
        rawSearchTerm = ((StringParam) theParameter).getValue();
    } else if (theParameter instanceof IPrimitiveDatatype<?>) {
        rawSearchTerm = ((IPrimitiveDatatype<?>) theParameter).getValueAsString();
    } else {
        throw new IllegalArgumentException("Invalid token type: " + theParameter.getClass());
    }

    if (rawSearchTerm.length() > ResourceIndexedSearchParamString.MAX_LENGTH) {
        throw new InvalidRequestException("Parameter[" + theParamName + "] has length (" + rawSearchTerm.length() + ") that is longer than maximum allowed ("
                + ResourceIndexedSearchParamString.MAX_LENGTH + "): " + rawSearchTerm);
    }

    // Default string matching is case/accent-normalized starts-with
    String likeExpression = createLeftMatchLikeExpression(BaseHapiFhirDao.normalizeString(rawSearchTerm));
    Predicate predicate = theBuilder.like(theFrom.get("myValueNormalized").as(String.class), likeExpression);
    if (theParameter instanceof StringParam && ((StringParam) theParameter).isExact()) {
        // :exact also requires an unnormalized, full-value match
        predicate = theBuilder.and(predicate, theBuilder.equal(theFrom.get("myValueExact"), rawSearchTerm));
    }
    return predicate;
}
/**
 * Builds the predicate for a single token search value. System handling follows FHIR token
 * semantics: a non-blank system must match exactly; an explicitly empty system ("") matches
 * only entries stored with no system; a null system places no restriction on the system.
 *
 * @throws IllegalArgumentException for unsupported parameter types
 * @throws InvalidRequestException  if system or code exceed the indexable length
 */
private Predicate createPredicateToken(IQueryParameterType theParameter, String theParamName, CriteriaBuilder theBuilder,
        From<ResourceIndexedSearchParamToken, ResourceIndexedSearchParamToken> theFrom) {
    String system;
    String code;
    if (theParameter instanceof TokenParam) {
        TokenParam token = (TokenParam) theParameter;
        system = token.getSystem();
        code = token.getValue();
    } else if (theParameter instanceof BaseIdentifierDt) {
        BaseIdentifierDt identifier = (BaseIdentifierDt) theParameter;
        system = identifier.getSystemElement().getValueAsString();
        code = identifier.getValueElement().getValue();
    } else if (theParameter instanceof BaseCodingDt) {
        BaseCodingDt coding = (BaseCodingDt) theParameter;
        system = coding.getSystemElement().getValueAsString();
        code = coding.getCodeElement().getValue();
    } else {
        throw new IllegalArgumentException("Invalid token type: " + theParameter.getClass());
    }

    if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
        throw new InvalidRequestException(
                "Parameter[" + theParamName + "] has system (" + system.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + system);
    }
    if (code != null && code.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
        throw new InvalidRequestException(
                "Parameter[" + theParamName + "] has code (" + code.length() + ") that is longer than maximum allowed (" + ResourceIndexedSearchParamToken.MAX_LENGTH + "): " + code);
    }

    ArrayList<Predicate> parts = new ArrayList<Predicate>();
    if (StringUtils.isNotBlank(system)) {
        parts.add(theBuilder.equal(theFrom.get("mySystem"), system));
    } else if (system != null) {
        // System explicitly given as "" - match only entries with no system
        parts.add(theBuilder.isNull(theFrom.get("mySystem")));
    }
    // system == null: don't check the system at all
    if (StringUtils.isNotBlank(code)) {
        parts.add(theBuilder.equal(theFrom.get("myValue"), code));
    } else {
        parts.add(theBuilder.isNull(theFrom.get("myValue")));
    }
    return theBuilder.and(toArray(parts));
}
/**
 * Restricts resource-link rows to those indexed from the path(s) of the named reference
 * search parameter, so links created by other parameters are excluded.
 */
private Predicate createResourceLinkPathPredicate(String theParamName, CriteriaBuilder builder, Root<? extends ResourceLink> from) {
    RuntimeSearchParam searchParam = myContext.getResourceDefinition(myResourceType).getSearchParam(theParamName);
    List<String> paths = searchParam.getPathsSplit();
    return from.get("mySourcePath").in(paths);
}
/**
 * Builds (without executing) a query selecting the PIDs of every non-deleted resource of
 * this builder's type, optionally constrained by a _lastUpdated range.
 */
private TypedQuery<Long> createSearchAllByTypeQuery(DateRangeParam theLastUpdated) {
    CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
    CriteriaQuery<Long> query = cb.createQuery(Long.class);
    Root<ResourceTable> root = query.from(ResourceTable.class);
    query.select(root.get("myId").as(Long.class));

    List<Predicate> predicates = new ArrayList<Predicate>();
    predicates.add(cb.equal(root.get("myResourceType"), myResourceName));
    predicates.add(cb.isNull(root.get("myDeleted")));
    if (theLastUpdated != null) {
        predicates.addAll(createLastUpdatedPredicates(theLastUpdated, cb, root));
    }
    query.where(toArray(predicates));

    return myEntityManager.createQuery(query);
}
/**
 * Recursively translates a _sort specification (and its chain) into ORDER BY clauses,
 * adding any join predicates the sort requires. _id and _lastUpdated sort on the resource
 * table itself; other parameters join to the matching index table.
 *
 * @throws InvalidRequestException for unknown parameters or unsupported parameter types
 */
private void createSort(CriteriaBuilder theBuilder, Root<ResourceTable> theFrom, SortSpec theSort, List<Order> theOrders, List<Predicate> thePredicates) {
if (theSort == null || isBlank(theSort.getParamName())) {
return;
}
// _id: order by forced (client-assigned) id first, then numeric PID as a tiebreaker
if (BaseResource.SP_RES_ID.equals(theSort.getParamName())) {
From<?, ?> forcedIdJoin = theFrom.join("myForcedId", JoinType.LEFT);
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(forcedIdJoin.get("myForcedId")));
theOrders.add(theBuilder.asc(theFrom.get("myId")));
} else {
theOrders.add(theBuilder.desc(forcedIdJoin.get("myForcedId")));
theOrders.add(theBuilder.desc(theFrom.get("myId")));
}
// Continue with the next sort level, if any
createSort(theBuilder, theFrom, theSort.getChain(), theOrders, thePredicates);
return;
}
// _lastUpdated: sort directly on the resource table's update timestamp
if (Constants.PARAM_LASTUPDATED.equals(theSort.getParamName())) {
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(theFrom.get("myUpdated")));
} else {
theOrders.add(theBuilder.desc(theFrom.get("myUpdated")));
}
createSort(theBuilder, theFrom, theSort.getChain(), theOrders, thePredicates);
return;
}
RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(myResourceType);
RuntimeSearchParam param = resourceDef.getSearchParam(theSort.getParamName());
if (param == null) {
throw new InvalidRequestException("Unknown sort parameter '" + theSort.getParamName() + "'");
}
// Pick the index table to join and the column(s) to order by, per parameter type
String joinAttrName;
String[] sortAttrName;
switch (param.getParamType()) {
case STRING:
joinAttrName = "myParamsString";
sortAttrName = new String[] { "myValueExact" };
break;
case DATE:
joinAttrName = "myParamsDate";
sortAttrName = new String[] { "myValueLow" };
break;
case REFERENCE:
joinAttrName = "myResourceLinks";
sortAttrName = new String[] { "myTargetResourcePid" };
break;
case TOKEN:
joinAttrName = "myParamsToken";
// Tokens order by system first, then code
sortAttrName = new String[] { "mySystem", "myValue" };
break;
case NUMBER:
joinAttrName = "myParamsNumber";
sortAttrName = new String[] { "myValue" };
break;
case URI:
joinAttrName = "myParamsUri";
sortAttrName = new String[] { "myUri" };
break;
case QUANTITY:
joinAttrName = "myParamsQuantity";
sortAttrName = new String[] { "myValue" };
break;
default:
throw new InvalidRequestException("This server does not support _sort specifications of type " + param.getParamType() + " - Can't serve _sort=" + theSort.getParamName());
}
// Inner join restricts results to rows of the sorted parameter; reference params
// are filtered by source path, all others by parameter name
From<?, ?> stringJoin = theFrom.join(joinAttrName, JoinType.INNER);
if (param.getParamType() == RestSearchParameterTypeEnum.REFERENCE) {
thePredicates.add(stringJoin.get("mySourcePath").as(String.class).in(param.getPathsSplit()));
} else {
thePredicates.add(theBuilder.equal(stringJoin.get("myParamName"), theSort.getParamName()));
}
// Predicate p = theBuilder.equal(stringJoin.get("myParamName"), theSort.getParamName());
// Predicate pn = theBuilder.isNull(stringJoin.get("myParamName"));
// thePredicates.add(theBuilder.or(p, pn));
for (String next : sortAttrName) {
if (theSort.getOrder() == null || theSort.getOrder() == SortOrderEnum.ASC) {
theOrders.add(theBuilder.asc(stringJoin.get(next)));
} else {
theOrders.add(theBuilder.desc(stringJoin.get(next)));
}
}
// Recurse into the chained (secondary) sort, if present
createSort(theBuilder, theFrom, theSort.getChain(), theOrders, thePredicates);
}
/**
 * Returns the PIDs matched so far: loaded from the persisted search results in persisted
 * mode, otherwise a defensive copy of the in-memory collection.
 */
public Set<Long> doGetPids() {
    if (!myParams.isPersistResults()) {
        return new HashSet<Long>(myPids);
    }
    HashSet<Long> pids = new HashSet<Long>();
    for (SearchResult result : mySearchResultDao.findWithSearchUuid(mySearchEntity)) {
        pids.add(result.getResourcePid());
    }
    return pids;
}
/**
 * Returns true when the search has run and matched nothing. In memory mode a null PID
 * collection means "no search has executed yet" and therefore returns false.
 */
private boolean doHaveNoResults() {
    return myParams.isPersistResults()
            ? mySearchEntity.getTotalCount() == 0
            : myPids != null && myPids.isEmpty();
}
/**
 * Lazily creates the Search entity describing this search (uuid, creation time, paging,
 * includes/revincludes). A total count of -1 marks the search as not yet executed. In
 * persisted mode the entity and its includes are written to the database immediately.
 * Idempotent: does nothing if the entity already exists.
 */
private void doInitializeSearch() {
if (mySearchEntity == null) {
mySearchEntity = new Search();
mySearchEntity.setUuid(UUID.randomUUID().toString());
mySearchEntity.setCreated(new Date());
// -1 = no result set has been stored yet (see createPredicateResourceId)
mySearchEntity.setTotalCount(-1);
mySearchEntity.setPreferredPageSize(myParams.getCount());
mySearchEntity.setSearchType(myParams.getEverythingMode() != null ? SearchTypeEnum.EVERYTHING : SearchTypeEnum.SEARCH);
mySearchEntity.setLastUpdated(myParams.getLastUpdated());
// Record _include / _revinclude directives; the boolean flag distinguishes reverse includes
for (Include next : myParams.getIncludes()) {
mySearchEntity.getIncludes().add(new SearchInclude(mySearchEntity, next.getValue(), false, next.isRecurse()));
}
for (Include next : myParams.getRevIncludes()) {
mySearchEntity.getIncludes().add(new SearchInclude(mySearchEntity, next.getValue(), true, next.isRecurse()));
}
if (myParams.isPersistResults()) {
myEntityManager.persist(mySearchEntity);
for (SearchInclude next : mySearchEntity.getIncludes()) {
myEntityManager.persist(next);
}
}
}
}
/**
 * Wraps the search outcome in an IBundleProvider: a database-backed provider in persisted
 * mode, an empty provider if no search ran, otherwise an in-memory provider over the PIDs.
 */
private IBundleProvider doReturnProvider() {
    if (myParams.isPersistResults()) {
        return new PersistedJpaBundleProvider(mySearchEntity.getUuid(), myCallingDao);
    }
    if (myPids == null) {
        return new SimpleBundleProvider();
    }
    return new BundleProviderInMemory(myPids);
}
/**
 * Replaces the current result PID set. In memory mode this simply stores the collection;
 * in persisted mode any previously stored results for this search are deleted and the new
 * PIDs are written out in iteration order, updating the search's total count.
 */
private void doSetPids(Collection<Long> thePids) {
    if (!myParams.isPersistResults()) {
        myPids = thePids;
        return;
    }
    // Drop the previous result set before writing the replacement
    mySearchResultDao.deleteForSearch(mySearchEntity.getId());
    mySearchResultDao.flush();

    LinkedHashSet<SearchResult> results = new LinkedHashSet<SearchResult>();
    int order = 0;
    for (Long pid : thePids) {
        SearchResult result = new SearchResult(mySearchEntity);
        result.setResourcePid(pid);
        result.setOrder(order++);
        results.add(result);
    }
    mySearchResultDao.save(results);
    mySearchEntity.setTotalCount(results.size());
    mySearchEntity = myEntityManager.merge(mySearchEntity);
    myEntityManager.flush();
}
/**
 * Narrows the current PID set to resources whose last-updated timestamp falls inside the
 * given range, then stores the filtered set.
 */
private void filterResourceIdsByLastUpdated(final DateRangeParam theLastUpdated) {
    CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
    CriteriaQuery<Long> query = cb.createQuery(Long.class);
    Root<ResourceTable> root = query.from(ResourceTable.class);
    query.select(root.get("myId").as(Long.class));

    List<Predicate> predicates = createLastUpdatedPredicates(theLastUpdated, cb, root);
    // Intersect with the PIDs matched so far
    createPredicateResourceId(cb, query, predicates, root.get("myId").as(Long.class));
    query.where(SearchBuilder.toArray(predicates));

    doSetPids(myEntityManager.createQuery(query).getResultList());
}
/**
 * Convenience overload: loads resources for the given PIDs into the output list using this
 * builder's own entity manager, FHIR context and calling DAO.
 */
private void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation) {
    loadResourcesByPid(theIncludePids, theResourceListToPopulate, theRevIncludedPids, theForHistoryOperation, myEntityManager, myContext, myCallingDao);
}
/**
 * Applies the _sort specification to the already-matched PID set. Runs a sorted query
 * restricted to the current PIDs, then appends (unsorted, at the end) any matched PIDs the
 * sorted query did not return - e.g. resources lacking a value for the sorted parameter,
 * since the sort join is INNER. No-op when no sort is requested.
 */
private void processSort(final SearchParameterMap theParams) {
// Set<Long> loadPids = theLoadPids;
if (theParams.getSort() != null && isNotBlank(theParams.getSort().getParamName())) {
List<Order> orders = new ArrayList<Order>();
List<Predicate> predicates = new ArrayList<Predicate>();
CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
CriteriaQuery<Tuple> cq = builder.createTupleQuery();
Root<ResourceTable> from = cq.from(ResourceTable.class);
// Restrict the sort query to the PIDs matched by the search itself
createPredicateResourceId(builder, cq, predicates, from.get("myId").as(Long.class));
createSort(builder, from, theParams.getSort(), orders, predicates);
if (orders.size() > 0) {
// TODO: why do we need the existing list for this join to work?
Collection<Long> originalPids = doGetPids();
// LinkedHashSet preserves the database sort order while de-duplicating PIDs
LinkedHashSet<Long> loadPids = new LinkedHashSet<Long>();
cq.multiselect(from.get("myId").as(Long.class));
cq.where(toArray(predicates));
cq.orderBy(orders);
TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
for (Tuple next : query.getResultList()) {
loadPids.add(next.get(0, Long.class));
}
ourLog.debug("Sort PID order is now: {}", loadPids);
ArrayList<Long> pids = new ArrayList<Long>(loadPids);
// Any ressources which weren't matched by the sort get added to the bottom
for (Long next : originalPids) {
if (loadPids.contains(next) == false) {
pids.add(next);
}
}
doSetPids(pids);
}
}
}
/**
 * Performs a search using the given parameter map and returns a provider over
 * the matching resource PIDs. The flow is:
 * <ol>
 * <li>$everything mode: seed the PID set from the fulltext index, or from the
 * target resource plus everything that links to it</li>
 * <li>empty parameters: select all non-deleted resources of this type</li>
 * <li>otherwise: optional fulltext narrowing, then AND/OR parameter search</li>
 * </ol>
 * followed by _lastUpdated filtering and sorting.
 *
 * @param theParams the search parameters supplied by the client
 * @return a bundle provider over the matching PIDs (possibly empty, never null)
 */
public IBundleProvider search(final SearchParameterMap theParams) {
	myParams = theParams;
	StopWatch w = new StopWatch();
	doInitializeSearch();
	DateRangeParam lu = theParams.getLastUpdated();
	// Collection<Long> loadPids;
	if (theParams.getEverythingMode() != null) {
		// $everything operation, optionally narrowed to a single resource via _id
		Long pid = null;
		if (theParams.get(BaseResource.SP_RES_ID) != null) {
			StringParam idParm = (StringParam) theParams.get(BaseResource.SP_RES_ID).get(0).get(0);
			pid = BaseHapiFhirDao.translateForcedIdToPid(new IdDt(idParm.getValue()), myEntityManager);
		}
		if (theParams.containsKey(Constants.PARAM_CONTENT) || theParams.containsKey(Constants.PARAM_TEXT)) {
			// Fulltext (_content/_text) variant of $everything
			List<Long> pids = mySearchDao.everything(myResourceName, theParams);
			if (pids.isEmpty()) {
				return doReturnProvider();
			}
			doSetPids(pids);
		} else {
			// Select matching resources of this type plus, via the LEFT JOIN on
			// incoming resource links, the PIDs of every resource referencing them
			CriteriaBuilder builder = myEntityManager.getCriteriaBuilder();
			CriteriaQuery<Tuple> cq = builder.createTupleQuery();
			Root<ResourceTable> from = cq.from(ResourceTable.class);
			List<Predicate> predicates = new ArrayList<Predicate>();
			if (pid != null) {
				predicates.add(builder.equal(from.get("myId"), pid));
			}
			predicates.add(builder.equal(from.get("myResourceType"), myResourceName));
			predicates.add(builder.isNull(from.get("myDeleted")));
			cq.where(builder.and(SearchBuilder.toArray(predicates)));
			Join<Object, Object> join = from.join("myIncomingResourceLinks", JoinType.LEFT);
			cq.multiselect(from.get("myId").as(Long.class), join.get("mySourceResourcePid").as(Long.class));
			TypedQuery<Tuple> query = myEntityManager.createQuery(cq);
			Set<Long> pids = new HashSet<Long>();
			for (Tuple next : query.getResultList()) {
				pids.add(next.get(0, Long.class));
				// Second tuple column is null when the resource has no incoming links
				Long nextLong = next.get(1, Long.class);
				if (nextLong != null) {
					pids.add(nextLong);
				}
			}
			doSetPids(pids);
		}
	} else if (theParams.isEmpty()) {
		// No parameters at all: return every resource of this type
		TypedQuery<Long> query = createSearchAllByTypeQuery(lu);
		doSetPids(query.getResultList());
	} else {
		if (mySearchDao == null) {
			// Fulltext parameters can't be honoured without a search DAO
			if (theParams.containsKey(Constants.PARAM_TEXT)) {
				throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_TEXT);
			} else if (theParams.containsKey(Constants.PARAM_CONTENT)) {
				throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_CONTENT);
			}
		} else {
			// Narrow the candidate set via the fulltext index first, when available.
			// A null result means fulltext had nothing to contribute for these params.
			List<Long> searchResultPids = mySearchDao.search(myResourceName, theParams);
			if (searchResultPids != null) {
				if (searchResultPids.isEmpty()) {
					return doReturnProvider();
				}
				doSetPids(searchResultPids);
			}
		}
		if (!theParams.isEmpty()) {
			searchForIdsWithAndOr(theParams, lu);
		}
	}
	// // Load _include and _revinclude before filter and sort in everything mode
	// if (theParams.getEverythingMode() != null) {
	// if (theParams.getRevIncludes() != null && theParams.getRevIncludes().isEmpty() == false) {
	// loadPids.addAll(loadReverseIncludes(loadPids, theParams.getRevIncludes(), true,
	// theParams.getEverythingMode()));
	// loadPids.addAll(loadReverseIncludes(loadPids, theParams.getIncludes(), false, theParams.getEverythingMode()));
	// }
	// }
	if (doHaveNoResults()) {
		return doReturnProvider();
	}
	// Handle _lastUpdated
	if (lu != null) {
		filterResourceIdsByLastUpdated(lu);
		if (doHaveNoResults()) {
			return doReturnProvider();
		}
	}
	// Handle sorting if any was provided
	processSort(theParams);
	ourLog.info(" {} on {} in {}ms", new Object[] { myResourceName, theParams, w.getMillisAndRestart() });
	return doReturnProvider();
}
/**
 * Resolves the given search parameters against the database, progressively
 * narrowing the current PID set. Parameter entries are combined with AND;
 * the value lists within each entry are combined with OR by the individual
 * addPredicate* helpers. Returns as soon as the result set becomes empty.
 *
 * @param theParams the search parameters to apply (may be null, treated as empty)
 * @param theLastUpdated _lastUpdated range used to constrain the sub-queries
 */
public void searchForIdsWithAndOr(SearchParameterMap theParams, DateRangeParam theLastUpdated) {
	SearchParameterMap params = theParams;
	if (params == null) {
		params = new SearchParameterMap();
	}
	myParams = theParams;
	doInitializeSearch();
	RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(myResourceType);
	for (Entry<String, List<List<? extends IQueryParameterType>>> nextParamEntry : params.entrySet()) {
		String nextParamName = nextParamEntry.getKey();
		if (nextParamName.equals(BaseResource.SP_RES_ID)) {
			// _id: resolve each requested id to a PID via readEntity(), skipping
			// deleted resources and ids that don't exist
			if (nextParamEntry.getValue().isEmpty()) {
				continue;
			} else {
				for (List<? extends IQueryParameterType> nextValue : nextParamEntry.getValue()) {
					Set<Long> joinPids = new HashSet<Long>();
					if (nextValue == null || nextValue.size() == 0) {
						continue;
					} else {
						for (IQueryParameterType next : nextValue) {
							String value = next.getValueAsQueryToken(myContext);
							IIdType valueId = new IdDt(value);
							try {
								BaseHasResource entity = myCallingDao.readEntity(valueId);
								if (entity.getDeleted() != null) {
									continue;
								}
								joinPids.add(entity.getId());
							} catch (ResourceNotFoundException e) {
								// This isn't an error, just means no result found
							}
						}
						if (joinPids.isEmpty()) {
							// None of the requested ids resolved, so the search has no results
							doSetPids(new HashSet<Long>());
							return;
						}
					}
					addPredicateId(joinPids, theLastUpdated);
					if (doHaveNoResults()) {
						return;
					}
				}
			}
		} else if (nextParamName.equals(BaseResource.SP_RES_LANGUAGE)) {
			addPredicateLanguage(nextParamEntry.getValue(), theLastUpdated);
		} else if (nextParamName.equals(Constants.PARAM_TAG) || nextParamName.equals(Constants.PARAM_PROFILE) || nextParamName.equals(Constants.PARAM_SECURITY)) {
			addPredicateTag(nextParamEntry.getValue(), nextParamName, theLastUpdated);
		} else {
			// Regular search parameter: dispatch on its declared type. Parameter
			// names unknown to this resource definition are silently ignored here.
			RuntimeSearchParam nextParamDef = resourceDef.getSearchParam(nextParamName);
			if (nextParamDef != null) {
				switch (nextParamDef.getParamType()) {
				case DATE:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateDate(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case QUANTITY:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateQuantity(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case REFERENCE:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateReference(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case STRING:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateString(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case TOKEN:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateToken(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case NUMBER:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateNumber(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case COMPOSITE:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateComposite(nextParamDef, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				case URI:
					for (List<? extends IQueryParameterType> nextAnd : nextParamEntry.getValue()) {
						addPredicateUri(nextParamName, nextAnd);
						if (doHaveNoResults()) {
							return;
						}
					}
					break;
				}
			}
		}
		if (doHaveNoResults()) {
			return;
		}
	}
}
/**
 * Sets the resource type this search builder operates on.
 *
 * @param theResourceType the resource implementation class
 * @param theResourceName the FHIR resource type name (e.g. "Patient")
 */
public void setType(Class<? extends IBaseResource> theResourceType, String theResourceName) {
	myResourceName = theResourceName;
	myResourceType = theResourceType;
}
/**
 * Returns the given PID collection as a {@link List}, reusing the instance
 * directly when it already is a list and copying it into a new
 * {@link ArrayList} otherwise.
 */
private List<Long> toList(Collection<Long> theLoadPids) {
	if (theLoadPids instanceof List) {
		return (List<Long>) theLoadPids;
	}
	return new ArrayList<Long>(theLoadPids);
}
/**
 * Creates an empty {@link IQueryParameterType} instance of the concrete type
 * matching the given search parameter definition.
 *
 * @param theParam the search parameter whose type determines the instance created
 * @return a new, unpopulated query parameter of the matching concrete type
 * @throws InternalErrorException if the parameter type is unsupported, or if a
 *            composite parameter does not have exactly two component parts
 */
private IQueryParameterType toParameterType(RuntimeSearchParam theParam) {
	IQueryParameterType qp;
	switch (theParam.getParamType()) {
	case DATE:
		qp = new DateParam();
		break;
	case NUMBER:
		qp = new NumberParam();
		break;
	case QUANTITY:
		qp = new QuantityParam();
		break;
	case STRING:
		qp = new StringParam();
		break;
	case TOKEN:
		qp = new TokenParam();
		break;
	case COMPOSITE:
		List<RuntimeSearchParam> compositeOf = theParam.getCompositeOf();
		if (compositeOf.size() != 2) {
			// CompositeParam only supports exactly two components.
			// (Fixed the previously garbled error message text.)
			throw new InternalErrorException("Parameter " + theParam.getName() + " has " + compositeOf.size() + " composite parts. Don't know how to handle this.");
		}
		IQueryParameterType leftParam = toParameterType(compositeOf.get(0));
		IQueryParameterType rightParam = toParameterType(compositeOf.get(1));
		qp = new CompositeParam<IQueryParameterType, IQueryParameterType>(leftParam, rightParam);
		break;
	case REFERENCE:
		qp = new ReferenceParam();
		break;
	default:
		throw new InternalErrorException("Don't know how to convert param type: " + theParam.getParamType());
	}
	return qp;
}
/**
 * Creates a query parameter instance for the given search parameter definition
 * and populates it from the raw query-token value supplied by the client.
 *
 * @param theParam the search parameter definition
 * @param theQualifier the parameter qualifier (e.g. ":exact"), may be null
 * @param theValueAsQueryToken the raw value as it appeared in the query string
 * @return the populated query parameter
 */
private IQueryParameterType toParameterType(RuntimeSearchParam theParam, String theQualifier, String theValueAsQueryToken) {
	IQueryParameterType qp = toParameterType(theParam);
	qp.setValueAsQueryToken(theQualifier, theValueAsQueryToken); // parse the raw token into the typed parameter
	return qp;
}
/**
 * Figures out the tolerance for a search. For example, if the user is searching for <code>4.00</code>, this method returns <code>0.005</code> because we should actually match values which are
 * <code>4 (+/-) 0.005</code> according to the FHIR specs.
 *
 * @param cmpValue the comparison prefix; APPROXIMATE gets a 10% tolerance
 * @param theValue the value being searched for
 * @return the tolerance to apply on either side of the value
 */
static BigDecimal calculateFuzzAmount(ParamPrefixEnum cmpValue, BigDecimal theValue) {
	if (cmpValue == ParamPrefixEnum.APPROXIMATE) {
		// 10% tolerance for approximate matching. The String constructor yields
		// exactly 0.1, unlike new BigDecimal(0.1) which captured the binary
		// floating point approximation of 0.1.
		return theValue.multiply(new BigDecimal("0.1"));
	} else {
		String plainString = theValue.toPlainString();
		int dotIdx = plainString.indexOf('.');
		if (dotIdx == -1) {
			// No decimal point: tolerance is half of one unit
			return new BigDecimal("0.5");
		}
		// Tolerance is half of the least significant supplied digit,
		// e.g. "4.00" -> precision 3 -> 0.005. Computed exactly with BigDecimal
		// instead of Math.pow/double, which leaked floating point error into
		// the fuzz amount.
		int precision = plainString.length() - (dotIdx);
		return new BigDecimal(5).movePointLeft(precision);
	}
}
/**
 * Builds criteria predicates constraining {@code ResourceTable.myUpdated} to
 * the given _lastUpdated range. Returns an empty, mutable list when no range
 * (or no bound) is supplied.
 */
private static List<Predicate> createLastUpdatedPredicates(final DateRangeParam theLastUpdated, CriteriaBuilder builder, From<?, ResourceTable> from) {
	List<Predicate> predicates = new ArrayList<Predicate>();
	if (theLastUpdated == null) {
		return predicates;
	}
	Date lowerBound = theLastUpdated.getLowerBoundAsInstant();
	if (lowerBound != null) {
		predicates.add(builder.greaterThanOrEqualTo(from.<Date> get("myUpdated"), lowerBound));
	}
	Date upperBound = theLastUpdated.getUpperBoundAsInstant();
	if (upperBound != null) {
		predicates.add(builder.lessThanOrEqualTo(from.<Date> get("myUpdated"), upperBound));
	}
	return predicates;
}
/**
 * Turns a raw string into a left-anchored LIKE expression: literal '%'
 * characters are escaped as "[%]" and a trailing '%' wildcard is appended.
 */
private static String createLeftMatchLikeExpression(String likeExpression) {
	String escaped = likeExpression.replace("%", "[%]");
	return escaped + "%";
}
/**
 * Restricts the given resource PIDs to those whose last-updated timestamp
 * falls within the supplied range, by re-querying the resource table.
 */
private static List<Long> filterResourceIdsByLastUpdated(EntityManager theEntityManager, final DateRangeParam theLastUpdated, Collection<Long> thePids) {
	CriteriaBuilder criteriaBuilder = theEntityManager.getCriteriaBuilder();
	CriteriaQuery<Long> criteriaQuery = criteriaBuilder.createQuery(Long.class);
	Root<ResourceTable> resourceTable = criteriaQuery.from(ResourceTable.class);
	criteriaQuery.select(resourceTable.get("myId").as(Long.class));
	// Combine the date-range predicates with an IN clause over the candidate PIDs
	List<Predicate> predicates = createLastUpdatedPredicates(theLastUpdated, criteriaBuilder, resourceTable);
	predicates.add(resourceTable.get("myId").as(Long.class).in(thePids));
	criteriaQuery.where(SearchBuilder.toArray(predicates));
	return theEntityManager.createQuery(criteriaQuery).getResultList();
}
/**
 * Loads the resources for the given PIDs and places them into
 * {@code theResourceListToPopulate} in the same order as {@code theIncludePids}.
 * Entries whose PID appears in {@code theRevIncludedPids} are tagged with
 * search mode INCLUDE; all others are tagged MATCH.
 *
 * @param theIncludePids PIDs to load; their iteration order determines output order
 * @param theResourceListToPopulate output list; this method appends one slot per PID
 * @param theRevIncludedPids PIDs that were brought in via _include/_revinclude
 * @param theForHistoryOperation whether resources are rendered for a history operation
 * @param entityManager entity manager used to run the query
 * @param context FHIR context used to resolve resource implementation classes
 * @param theDao DAO used to convert table entities into resource instances
 */
public static void loadResourcesByPid(Collection<Long> theIncludePids, List<IBaseResource> theResourceListToPopulate, Set<Long> theRevIncludedPids, boolean theForHistoryOperation,
		EntityManager entityManager, FhirContext context, IDao theDao) {
	if (theIncludePids.isEmpty()) {
		return;
	}
	// Record the output slot for each PID and pre-fill the list with nulls, so
	// results can be written back in the original PID order even though the
	// database may return rows in any order
	Map<Long, Integer> position = new HashMap<Long, Integer>();
	for (Long next : theIncludePids) {
		position.put(next, theResourceListToPopulate.size());
		theResourceListToPopulate.add(null);
	}
	CriteriaBuilder builder = entityManager.getCriteriaBuilder();
	CriteriaQuery<ResourceTable> cq = builder.createQuery(ResourceTable.class);
	Root<ResourceTable> from = cq.from(ResourceTable.class);
	cq.where(from.get("myId").in(theIncludePids));
	TypedQuery<ResourceTable> q = entityManager.createQuery(cq);
	for (ResourceTable next : q.getResultList()) {
		Class<? extends IBaseResource> resourceType = context.getResourceDefinition(next.getResourceType()).getImplementingClass();
		IBaseResource resource = (IBaseResource) theDao.toResource(resourceType, next, theForHistoryOperation);
		Integer index = position.get(next.getId());
		if (index == null) {
			ourLog.warn("Got back unexpected resource PID {}", next.getId());
			continue;
		}
		// Tag the entry's search mode. IResource (DSTU-style structures) and
		// IAnyResource use different metadata APIs, hence the two branches.
		if (resource instanceof IResource) {
			if (theRevIncludedPids.contains(next.getId())) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.INCLUDE);
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IResource) resource, BundleEntrySearchModeEnum.MATCH);
			}
		} else {
			if (theRevIncludedPids.contains(next.getId())) {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.INCLUDE.getCode());
			} else {
				ResourceMetadataKeyEnum.ENTRY_SEARCH_MODE.put((IAnyResource) resource, BundleEntrySearchModeEnum.MATCH.getCode());
			}
		}
		theResourceListToPopulate.set(index, resource);
	}
}
/**
 * Loads _include / _revinclude targets for the given set of matched PIDs,
 * adding any newly found PIDs to {@code theMatches} and returning the set of
 * PIDs that were added. Recursive includes (":recurse") are re-evaluated round
 * by round until no new PIDs appear; non-recursive includes run only once.
 * <p>
 * THIS SHOULD RETURN HASHSET and not just Set because we add to it later (so it can't be Collections.emptySet())
 *
 * @param theContext FHIR context used to resolve include paths
 * @param theEntityManager entity manager used to query resource links
 * @param theMatches the already-matched PIDs; newly included PIDs are ADDED to this collection
 * @param theRevIncludes the _include or _revinclude specifications to process
 * @param theReverseMode true for _revinclude (follow links backwards), false for _include
 * @param theLastUpdated optional _lastUpdated range used to filter included resources
 * @return the set of PIDs added by include processing (never null)
 */
public static HashSet<Long> loadReverseIncludes(FhirContext theContext, EntityManager theEntityManager, Collection<Long> theMatches, Set<Include> theRevIncludes, boolean theReverseMode,
		DateRangeParam theLastUpdated) {
	if (theMatches.size() == 0) {
		return new HashSet<Long>();
	}
	if (theRevIncludes == null || theRevIncludes.isEmpty()) {
		return new HashSet<Long>();
	}
	// _revinclude searches links by target, _include by source
	String searchFieldName = theReverseMode ? "myTargetResourcePid" : "mySourceResourcePid";
	Collection<Long> nextRoundMatches = theMatches;
	HashSet<Long> allAdded = new HashSet<Long>();
	HashSet<Long> original = new HashSet<Long>(theMatches);
	ArrayList<Include> includes = new ArrayList<Include>(theRevIncludes);
	int roundCounts = 0;
	StopWatch w = new StopWatch();
	boolean addedSomeThisRound;
	do {
		roundCounts++;
		HashSet<Long> pidsToInclude = new HashSet<Long>();
		Set<Long> nextRoundOmit = new HashSet<Long>();
		for (Iterator<Include> iter = includes.iterator(); iter.hasNext();) {
			Include nextInclude = iter.next();
			// Non-recursive includes are dropped after this (first) round
			if (nextInclude.isRecurse() == false) {
				iter.remove();
			}
			boolean matchAll = "*".equals(nextInclude.getValue());
			if (matchAll) {
				// "*": follow every resource link regardless of path
				String sql;
				sql = "SELECT r FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids)";
				TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
				q.setParameter("target_pids", nextRoundMatches);
				List<ResourceLink> results = q.getResultList();
				for (ResourceLink resourceLink : results) {
					if (theReverseMode) {
						// if (theEverythingModeEnum.isEncounter()) {
						// if (resourceLink.getSourcePath().equals("Encounter.subject") ||
						// resourceLink.getSourcePath().equals("Encounter.patient")) {
						// nextRoundOmit.add(resourceLink.getSourceResourcePid());
						// }
						// }
						pidsToInclude.add(resourceLink.getSourceResourcePid());
					} else {
						pidsToInclude.add(resourceLink.getTargetResourcePid());
					}
				}
			} else {
				// Named include: resolve the search parameter to its link path(s)
				List<String> paths;
				if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU1) {
					paths = Collections.singletonList(nextInclude.getValue());
				} else {
					String resType = nextInclude.getParamType();
					if (isBlank(resType)) {
						continue;
					}
					RuntimeResourceDefinition def = theContext.getResourceDefinition(resType);
					if (def == null) {
						ourLog.warn("Unknown resource type in include/revinclude=" + nextInclude.getValue());
						continue;
					}
					String paramName = nextInclude.getParamName();
					RuntimeSearchParam param = isNotBlank(paramName) ? def.getSearchParam(paramName) : null;
					if (param == null) {
						ourLog.warn("Unknown param name in include/revinclude=" + nextInclude.getValue());
						continue;
					}
					paths = param.getPathsSplit();
				}
				String targetResourceType = defaultString(nextInclude.getParamTargetType(), null);
				for (String nextPath : paths) {
					String sql;
					// Constrain by target resource type when the include specifies one (e.g. "Patient:link:Patient")
					if (targetResourceType != null) {
						sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids) AND r.myTargetResourceType = :target_resource_type";
					} else {
						sql = "SELECT r FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)";
					}
					TypedQuery<ResourceLink> q = theEntityManager.createQuery(sql, ResourceLink.class);
					q.setParameter("src_path", nextPath);
					q.setParameter("target_pids", nextRoundMatches);
					if (targetResourceType != null) {
						q.setParameter("target_resource_type", targetResourceType);
					}
					List<ResourceLink> results = q.getResultList();
					for (ResourceLink resourceLink : results) {
						if (theReverseMode) {
							pidsToInclude.add(resourceLink.getSourceResourcePid());
						} else {
							pidsToInclude.add(resourceLink.getTargetResourcePid());
						}
					}
				}
			}
		}
		// Apply the _lastUpdated filter to the newly included resources, if requested
		if (theLastUpdated != null && (theLastUpdated.getLowerBoundAsInstant() != null || theLastUpdated.getUpperBoundAsInstant() != null)) {
			pidsToInclude = new HashSet<Long>(filterResourceIdsByLastUpdated(theEntityManager, theLastUpdated, pidsToInclude));
		}
		for (Long next : pidsToInclude) {
			if (original.contains(next) == false && allAdded.contains(next) == false) {
				theMatches.add(next);
			}
		}
		pidsToInclude.removeAll(nextRoundOmit);
		// Loop again only while recursive includes remain and the last round found something new
		addedSomeThisRound = allAdded.addAll(pidsToInclude);
		nextRoundMatches = pidsToInclude;
	} while (includes.size() > 0 && nextRoundMatches.size() > 0 && addedSomeThisRound);
	ourLog.info("Loaded {} {} in {} rounds and {} ms", new Object[] { allAdded.size(), theReverseMode ? "_revincludes" : "_includes", roundCounts, w.getMillisAndRestart() });
	return allAdded;
}
/**
 * Converts a predicate list into the array form required by the JPA Criteria
 * API (e.g. {@code CriteriaQuery#where}).
 */
static Predicate[] toArray(List<Predicate> thePredicates) {
	Predicate[] result = new Predicate[thePredicates.size()];
	return thePredicates.toArray(result);
}
/**
 * {@link IBundleProvider} backed by an in-memory list of resource PIDs.
 * Resources are loaded lazily, page by page and inside a transaction, in
 * {@link #getResources(int, int)}, at which point _include / _revinclude
 * references for the requested page are also resolved.
 */
private final class BundleProviderInMemory implements IBundleProvider {
	private final ArrayList<Long> myPids;

	private BundleProviderInMemory(Collection<Long> thePids) {
		// Only reuse the caller's collection when it actually is an ArrayList.
		// The previous "instanceof List" check cast any List to ArrayList and
		// threw ClassCastException for other List implementations (e.g. LinkedList).
		final ArrayList<Long> pids;
		if (thePids instanceof ArrayList) {
			pids = (ArrayList<Long>) thePids;
		} else {
			pids = new ArrayList<Long>(thePids);
		}
		myPids = pids;
	}

	@Override
	public InstantDt getPublished() {
		// Timestamp recorded when the search entity was created
		return new InstantDt(mySearchEntity.getCreated());
	}

	@Override
	public List<IBaseResource> getResources(final int theFromIndex, final int theToIndex) {
		TransactionTemplate template = new TransactionTemplate(myPlatformTransactionManager);
		return template.execute(new TransactionCallback<List<IBaseResource>>() {
			@Override
			public List<IBaseResource> doInTransaction(TransactionStatus theStatus) {
				List<Long> pidsSubList = myPids.subList(theFromIndex, theToIndex);
				// Copy the sublist: loadReverseIncludes() adds include PIDs to it,
				// and we must not mutate myPids through the subList view
				pidsSubList = new ArrayList<Long>(pidsSubList);
				Set<Long> revIncludedPids = new HashSet<Long>();
				if (myParams.getEverythingMode() == null) {
					revIncludedPids.addAll(loadReverseIncludes(myContext, myEntityManager, pidsSubList, myParams.getRevIncludes(), true, myParams.getLastUpdated()));
				}
				revIncludedPids.addAll(loadReverseIncludes(myContext, myEntityManager, pidsSubList, myParams.getIncludes(), false, myParams.getLastUpdated()));
				// Execute the query and make sure we return distinct results
				List<IBaseResource> resources = new ArrayList<IBaseResource>();
				loadResourcesByPid(pidsSubList, resources, revIncludedPids, false);
				return resources;
			}
		});
	}

	@Override
	public Integer preferredPageSize() {
		return myParams.getCount();
	}

	@Override
	public int size() {
		return myPids.size();
	}
}
}
| apache-2.0 |
wanglidog/coolweathers | app/src/main/java/com/coolweathers/android/util/Utility.java | 3482 | package com.coolweathers.android.util;
import android.text.TextUtils;
import com.coolweathers.android.db.City;
import com.coolweathers.android.db.County;
import com.coolweathers.android.db.Province;
import com.coolweathers.android.gson.Weather;
import com.google.gson.Gson;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
/**
 * Utility methods for parsing JSON responses returned by the weather server
 * and converting them into (and persisting) the application's model objects.
 */
public class Utility {
	/**
	 * Parses and persists the province-level data returned by the server.
	 *
	 * @param response JSON array of provinces, each carrying "name" and "id" fields
	 * @return true if the response was non-empty and parsed successfully, false otherwise
	 */
	public static boolean handleProvinceResponse(String response) {
		if (!TextUtils.isEmpty(response)) {
			try {
				JSONArray allProvinces = new JSONArray(response);
				for (int i = 0; i < allProvinces.length(); i++) {
					JSONObject provinceObject = allProvinces.getJSONObject(i);
					Province province = new Province();
					province.setProvinceName(provinceObject.getString("name"));
					province.setProvinceCode(provinceObject.getInt("id"));
					// save() persists the record (provided by the model's ORM base class)
					province.save();
				}
				return true;
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		return false;
	}
	/**
	 * Parses and persists the city-level data returned by the server.
	 *
	 * @param response JSON array of cities, each carrying "name" and "id" fields
	 * @param provinceId id of the province these cities belong to
	 * @return true if the response was non-empty and parsed successfully, false otherwise
	 */
	public static boolean handleCityResponse(String response, int provinceId) {
		if (!TextUtils.isEmpty(response)) {
			try {
				JSONArray allCities = new JSONArray(response);
				for (int i = 0; i < allCities.length(); i++) {
					JSONObject cityObject = allCities.getJSONObject(i);
					City city = new City();
					city.setCityName(cityObject.getString("name"));
					city.setCityCode(cityObject.getInt("id"));
					city.setProvinceId(provinceId);
					city.save();
				}
				return true;
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		return false;
	}
	/**
	 * Parses and persists the county-level data returned by the server.
	 *
	 * @param response JSON array of counties, each carrying "name" and "weather_id" fields
	 * @param cityId id of the city these counties belong to
	 * @return true if the response was non-empty and parsed successfully, false otherwise
	 */
	public static boolean handleCountyResponse(String response, int cityId) {
		if (!TextUtils.isEmpty(response)) {
			try {
				JSONArray allCounties = new JSONArray(response);
				for (int i = 0; i < allCounties.length(); i++) {
					JSONObject countyObject = allCounties.getJSONObject(i);
					County county = new County();
					county.setCountyName(countyObject.getString("name"));
					county.setWeatherId(countyObject.getString("weather_id"));
					county.setCityId(cityId);
					county.save();
				}
				return true;
			} catch (JSONException e) {
				e.printStackTrace();
			}
		}
		return false;
	}
	/**
	 * Parses the returned JSON data into a {@link Weather} entity.
	 * The payload is expected to contain a "HeWeather" array whose first
	 * element holds the weather data for the requested location.
	 *
	 * @param response raw JSON text returned by the weather service
	 * @return the parsed Weather instance, or null if parsing fails
	 */
	public static Weather handleWeatherResponse(String response) {
		try {
			JSONObject jsonObject = new JSONObject(response);
			JSONArray jsonArray = jsonObject.getJSONArray("HeWeather");
			String weatherContent = jsonArray.getJSONObject(0).toString();
			return new Gson().fromJson(weatherContent, Weather.class);
		// NOTE(review): catches Exception broadly because both org.json and Gson
		// parsing may throw; any failure yields a null result to the caller
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}
}
| apache-2.0 |