text
stringlengths 7
1.01M
|
|---|
package io.onedev.server.web.opengraph;
/**
 * Types of OpenGraph &lt;meta&gt; properties emitted in page headers.
 * {@link #toString()} yields the literal OpenGraph property name
 * (e.g. "og:image").
 */
public enum OpenGraphHeaderMetaType {
    Image("og:image"),
    Title("og:title"),
    Description("og:description"),
    Url("og:url");

    // The OpenGraph property name for this meta type. Holding it in a field
    // removes the old switch whose unreachable "default: return null" would
    // have silently produced null if a new constant were ever added.
    private final String property;

    OpenGraphHeaderMetaType(String property) {
        this.property = property;
    }

    /** Returns the OpenGraph property name, e.g. "og:title". */
    @Override
    public String toString() {
        return property;
    }
}
|
package com.java2nb.novel.vo;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.java2nb.novel.entity.News;
import lombok.Data;
import java.util.Date;
/**
 * View object for a news entry: extends the News entity and adds a
 * presentation-formatted creation time for JSON serialization.
 *
 * NOTE(review): if News already declares a createTime field, this field
 * shadows it and the Lombok-generated accessors here take precedence —
 * confirm that is intended.
 * NOTE(review): timezone "GMT+7" and pattern "dd-MM-yyyy" are applied when
 * serializing to JSON; verify GMT+7 is the intended display zone.
 *
 * @author Administrator
 */
@Data
public class NewsVO extends News {
    @JsonFormat(timezone = "GMT+7", pattern = "dd-MM-yyyy")
    private Date createTime;
}
|
// Placeholder class: the decompiler timed out on
// net/minecraft/client/renderer/tileentity/TileEntityMobSpawnerRenderer,
// so the body is empty. Re-run decompilation or port the renderer by hand.
public class TileEntityMobSpawnerRenderer {
// Failed to decompile, took too long to decompile: net/minecraft/client/renderer/tileentity/TileEntityMobSpawnerRenderer
}
|
package com.zhou.blog.service;
import com.zhou.blog.vo.Result;
import com.zhou.blog.vo.TagVo;
import java.util.List;
/**
 * Read-side service for article tags.
 */
public interface TagService {
    /** Returns the tags attached to the article with the given id. */
    List<TagVo> findTagsByArticleId(Long articleId);

    /** Returns the hottest tags, at most {@code limit} of them, wrapped in a Result. */
    Result hots(int limit);

    /** Returns all article tags. */
    Result findAll();

    /** Returns the detail of a single tag by its id. */
    Result findDetailById(Long id);
}
|
package com.jl.crm.client;
import org.springframework.social.connect.*;
import org.springframework.web.client.HttpStatusCodeException;
/**
 * Adapts the CRM API client to Spring Social's connection framework.
 *
 * @author Josh Long
 */
public class CrmApiAdapter implements ApiAdapter<CrmOperations> {

    /**
     * Probes the API for connectivity by fetching the current user.
     *
     * @param customerServiceOperations the API client to test
     * @return true if a current user could be fetched, false otherwise
     */
    @Override
    public boolean test(CrmOperations customerServiceOperations) {
        try {
            return null != customerServiceOperations.currentUser();
        }
        catch (HttpStatusCodeException e) {
            // Expected when the remote end rejects the call; we are only probing
            // connectivity, so report the failure and return false.
            System.err.println("HttpStatusCodeException : something very wrong happened here. " + e);
            return false;
        }
        // NOTE(review): the previous version also caught java.lang.Error here.
        // Swallowing Error (OutOfMemoryError, LinkageError, ...) hides JVM-fatal
        // conditions, so such errors are now allowed to propagate.
    }

    /**
     * Copies the current user's id and username onto the connection values.
     */
    @Override
    public void setConnectionValues(CrmOperations customerServiceOperations, ConnectionValues values) {
        User profile = customerServiceOperations.currentUser();
        values.setProviderUserId(Long.toString(profile.getId()));
        values.setDisplayName(profile.getUsername());
    }

    /**
     * Builds a Spring Social UserProfile from the CRM's current user.
     */
    @Override
    public UserProfile fetchUserProfile(CrmOperations customerServiceOperations) {
        User user = customerServiceOperations.currentUser();
        String name = user.getFirstName() + ' ' + user.getLastName();
        return new UserProfileBuilder()
                .setName(name)
                .setUsername(user.getUsername())
                .setFirstName(user.getFirstName())
                .setLastName(user.getLastName())
                .build();
    }

    /**
     * No-op: the CRM API has no status concept; logs and ignores the message.
     */
    @Override
    public void updateStatus(CrmOperations customerServiceOperations, String message) {
        System.out.println(String.format("calling updateStatus(CustomerServiceOperations customerServiceOperations, " +
                "String message) with the status '%s', but this method is a no-op!", message));
    }
}
|
package com.wangqing.chilemeserver.repository;
import com.wangqing.chilemeserver.object.dbo.UserInfo;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
import javax.transaction.Transactional;
@Repository
public interface UserInfoRepository extends JpaRepository<UserInfo, Integer> {
    /* Look up a user's profile information by user id. */
    UserInfo findByUserId(Integer userId);

    /* Update the nickname by user id.
       NOTE(review): "nike_name"/"nikeName" mirrors the existing DB column
       spelling (presumably a typo for "nickname"); renaming would require a
       schema migration, so it is kept as-is. */
    @Transactional
    @Modifying
    @Query(value = "UPDATE user_info SET nike_name = ?1 WHERE user_id = ?2", nativeQuery = true)
    int updateNikeNameByUserId(String nikeName, Integer userId);

    /* Update the profile introduction by user id. */
    @Transactional
    @Modifying
    @Query(value = "UPDATE user_info SET intro = ?1 WHERE user_id = ?2", nativeQuery = true)
    int updateIntroByUserId(String intro, Integer userId);

    /* Update the avatar storage URL by user id. */
    @Transactional
    @Modifying
    @Query(value = "UPDATE user_info SET avatar_url = ?1 WHERE user_id = ?2", nativeQuery = true)
    int updateAvatarUrlByUserId(String avatarUrl, Integer userId);

    /* Update the cover-image storage URL by user id. */
    @Transactional
    @Modifying
    @Query(value = "UPDATE user_info SET cover_url = ?1 WHERE user_id = ?2", nativeQuery = true)
    int updateCoverUrlByUserId(String coverUrl, Integer userId);
}
|
package com.actuator;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest
class ActuatorApplicationTests {
    /** Smoke test: passes if the Spring application context starts without error. */
    @Test
    void contextLoads() {
    }
}
|
package it.univaq.mwt.business;
import java.util.List;
import org.springframework.web.multipart.commons.CommonsMultipartFile;
import it.univaq.mwt.business.model.*;
/**
 * Business service for albums and their songs (canzoni).
 */
public interface AlbumService {
    /** Returns the album with the given id together with its songs. */
    Album getAllCanzoniByAlbumId(int albumId);

    /** Returns every album belonging to the given group. */
    List<Album> getAllAlbumsByGroupId(int groupId);

    /** Deletes the album with the given id. (Param renamed albumID -> albumId for consistency.) */
    void deleteAlbum(int albumId);

    /** Empties the album of its songs; presumably returns an affected-row count — confirm with the implementation. */
    int emptyAlbum(Album album);

    /** Persists an album with its uploaded tracks on behalf of the given user. */
    void saveAlbumWithSong(Utente utente, Album album, CommonsMultipartFile[] tracce);

    /** Updates an existing album. */
    void updateAlbum(Album album);
}
|
package com.main.daycare_administrative_system;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.AnchorPane;
import javafx.scene.layout.VBox;
import javafx.scene.text.Font;
import javafx.scene.text.FontPosture;
import javafx.scene.text.FontWeight;
import java.net.URL;
import java.sql.*;
import java.util.ResourceBundle;
/**
 * Controller for the Telephone Menu view: lists name/phone pairs for every
 * guardian and staff member found in the daycare database.
 */
public class TelephoneController implements Initializable {

    private static PreparedStatement preparedStatement = null;
    private static ResultSet resultSet = null;
    private static Connection connect = null;

    // SECURITY(review): database credentials are hard-coded and committed with
    // the source. Move them to external configuration (environment variables or
    // a properties file) and rotate the exposed password.
    private static final String url = "jdbc:mysql://localhost:3306/daycare";
    private static final String user = "root";
    private static final String pass = "pass";

    @FXML private ImageView userImage;
    @FXML private Label userName;
    @FXML private VBox container;
    @FXML private Button back;

    /* Establishes connection to the MySQL Database */
    private static void connection() {
        try {
            connect = DriverManager.getConnection(url, user, pass);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /* Closes the ResultSet, PreparedStatement and Connection, if open */
    private static void closeConnection() {
        if (resultSet != null) {
            try {
                resultSet.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (preparedStatement != null) {
            try {
                preparedStatement.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
        if (connect != null) {
            try {
                connect.close();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Takes phone number, first name, and last name, and generates a UI element AnchorPane.
     * This Pane can then be injected into a view dynamically every time the view is initialized.
     * @param t telephone number to print out ("N/A" when null)
     * @param firstName to concatenate as first part of fullName
     * @param lastName to concatenate as last part of fullName
     */
    public void injectTelephone(String t, String firstName, String lastName) {
        // Row container for one person's entry.
        AnchorPane innerContainer = new AnchorPane();
        innerContainer.setPrefHeight(50.0);
        innerContainer.setPrefWidth(1280.0);
        innerContainer.setMinWidth(1280.0);
        innerContainer.setStyle("-fx-border-style: solid; -fx-border-color:#b3b3b3;");

        // Phone label, with a placeholder when no number is stored.
        Label phone = new Label();
        if (t != null) {
            phone.setText(t);
        } else {
            phone.setText("N/A");
        }

        // Name label, with a placeholder when either name part is missing.
        Label fullName = new Label();
        if (firstName != null && lastName != null) {
            fullName.setText(firstName.concat(" ").concat(lastName));
        } else {
            fullName.setText("Name Unknown");
        }

        fullName.setFont(Font.font("Calibri", FontWeight.EXTRA_BOLD, FontPosture.REGULAR, 20));
        phone.setFont(Font.font("Calibri", FontWeight.NORMAL, FontPosture.REGULAR, 15));
        phone.setStyle("-fx-color-label-visible: #4bc190");
        innerContainer.getChildren().addAll(fullName, phone);
        fullName.setLayoutX(400.0);
        fullName.setLayoutY(15.0);
        phone.setLayoutX(800.0);
        phone.setLayoutY(17.5);
        container.getChildren().add(innerContainer);
    }

    /**
     * Shared query logic behind retrieveSTelephones/retrieveGTelephones (the
     * two methods were copy-paste duplicates differing only in table name and
     * join column). Injects one UI row per person in the given table.
     *
     * @param table    joined table name ("staff" or "guardian"); only fixed
     *                 compile-time constants are ever passed, never user input,
     *                 so the string concatenation below is not an injection risk
     * @param idColumn join column matching the table's key
     */
    private void retrieveTelephones(String table, String idColumn) {
        connection();
        try {
            preparedStatement = connect.prepareStatement("SELECT " +
                    table + ".first_name, " +
                    table + ".last_name, " +
                    "telephone.telephone_number " +
                    "FROM daycare.telephone " +
                    "JOIN daycare." + table + " " +
                    "USING (" + idColumn + ") " + // space added before ORDER BY for readability
                    "ORDER BY first_name");
            resultSet = preparedStatement.executeQuery();
            while (resultSet.next()) {
                injectTelephone(
                        resultSet.getString("telephone_number"),
                        resultSet.getString("first_name"),
                        resultSet.getString("last_name")
                );
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            closeConnection();
        }
    }

    /**
     * Injects one Pane per staff member found in the 'staff' table into the
     * Telephone Menu via injectTelephone.
     */
    public void retrieveSTelephones() {
        retrieveTelephones("staff", "staff_ID");
    }

    /**
     * Injects one Pane per guardian found in the 'guardian' table into the
     * Telephone Menu via injectTelephone.
     */
    public void retrieveGTelephones() {
        retrieveTelephones("guardian", "guardian_ID");
    }

    @Override
    public void initialize(URL url, ResourceBundle resourceBundle) {
        // Show the logged-in user (from the ConnectedUser singleton) on the navbar.
        userName.setText(Utilities.ConnectedUser.getConnectedUser().getFirstName().concat(" ").concat(Utilities.ConnectedUser.getConnectedUser().getLastName()));
        userImage.setImage(new Image(Utilities.ConnectedUser.getConnectedUser().getImage()));

        // Inject telephone numbers for each instance in the database.
        retrieveGTelephones();
        retrieveSTelephones();

        // Back button navigation and hover effect.
        back.setOnAction(new EventHandler<ActionEvent>() {
            @Override
            public void handle(ActionEvent actionEvent) {
                Utilities.changeScene(actionEvent, "main.fxml", "Main Menu", null, true, true, 0, 0);
            }
        });
        back.setOnMouseEntered(l -> {
            back.setStyle("-fx-background-color: red;");
        });
        back.setOnMouseExited(l -> {
            back.setStyle("-fx-background-color: #4bc190;");
        });
    }
}
|
package cz.metacentrum.perun.webgui.json.registrarManager;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.json.client.JSONArray;
import com.google.gwt.json.client.JSONNumber;
import com.google.gwt.json.client.JSONObject;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.resources.PerunEntity;
import cz.metacentrum.perun.webgui.client.resources.Utils;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonPostClient;
import cz.metacentrum.perun.webgui.model.ApplicationFormItem;
import cz.metacentrum.perun.webgui.model.PerunError;
import java.util.ArrayList;
/**
 * Request which updates registrar form items for a VO or Group.
 *
 * @author Pavel Zlamal &lt;256627@mail.muni.cz&gt;
 */
public class UpdateFormItems {

    // web session
    private PerunWebSession session = PerunWebSession.getInstance();
    // URL to call
    final String JSON_URL = "registrarManager/updateFormItems";
    // custom events
    private JsonCallbackEvents events = new JsonCallbackEvents();
    // form items to send
    private ArrayList<ApplicationFormItem> formItems = new ArrayList<ApplicationFormItem>();
    // vo or group id and which of the two it denotes
    private int id;
    private PerunEntity entity;

    /**
     * Creates a new request.
     *
     * @param entity VO or Group
     * @param id ID of entity
     */
    public UpdateFormItems(PerunEntity entity, int id) {
        this.entity = entity;
        this.id = id;
    }

    /**
     * Creates a new request with custom events.
     *
     * @param entity VO or Group
     * @param id ID of entity
     * @param events Custom events
     */
    public UpdateFormItems(PerunEntity entity, int id, JsonCallbackEvents events) {
        this.entity = entity;
        this.id = id;
        this.events = events;
    }

    /**
     * Updates form items in DB by the passed list of them.
     *
     * @param formItems items to store
     */
    public void updateFormItems(ArrayList<ApplicationFormItem> formItems) {
        this.formItems = formItems; // fixed stray double semicolon
        // test arguments
        if (!this.testCreating()) {
            return;
        }
        // wrap caller events with logging
        JsonCallbackEvents newEvents = new JsonCallbackEvents() {
            public void onError(PerunError error) {
                session.getUiElements().setLogErrorText("Updating form items failed.");
                events.onError(error);
            }
            public void onFinished(JavaScriptObject jso) {
                session.getUiElements().setLogSuccessText("Form items updated.");
                events.onFinished(jso);
            }
            public void onLoadingStart() {
                events.onLoadingStart();
            }
        };
        // sending data
        JsonPostClient jspc = new JsonPostClient(newEvents);
        jspc.sendData(JSON_URL, prepareJSONObject());
    }

    /** Argument validation hook; currently always passes. */
    private boolean testCreating() {
        // TODO Auto-generated method stub
        return true;
    }

    /**
     * Prepares a JSON object with the items and the owning vo/group id.
     *
     * @return JSONObject - the whole query
     */
    private JSONObject prepareJSONObject() {
        // data to JSON array
        JSONArray data = new JSONArray();
        for (int i = 0; i < formItems.size(); i++) {
            ApplicationFormItem item = formItems.get(i); // hoisted repeated get(i)
            JSONObject obj = new JSONObject(item);
            // reconstruct the item with only the whitelisted fields
            JSONObject newItem = new JSONObject();
            newItem.put("id", obj.get("id"));
            newItem.put("shortname", obj.get("shortname"));
            newItem.put("required", obj.get("required"));
            newItem.put("type", obj.get("type"));
            newItem.put("federationAttribute", obj.get("federationAttribute"));
            newItem.put("perunSourceAttribute", obj.get("perunSourceAttribute"));
            newItem.put("perunDestinationAttribute", obj.get("perunDestinationAttribute"));
            newItem.put("regex", obj.get("regex"));
            newItem.put("appTypes", obj.get("appTypes"));
            newItem.put("ordnum", obj.get("ordnum"));
            newItem.put("forDelete", obj.get("forDelete"));
            newItem.put("applicationTypes", obj.get("applicationTypes"));
            newItem.put("updatable", obj.get("updatable"));
            newItem.put("disabled", obj.get("disabled"));
            newItem.put("hidden", obj.get("hidden"));
            newItem.put("hiddenDependencyItemId", obj.get("hiddenDependencyItemId"));
            newItem.put("disabledDependencyItemId", obj.get("disabledDependencyItemId"));
            // recreate i18n: always English, plus the native language if configured
            JSONObject i18n = new JSONObject();
            i18n.put("en", new JSONObject(item.getItemTexts("en")));
            if (!Utils.getNativeLanguage().isEmpty()) {
                i18n.put(Utils.getNativeLanguage().get(0), new JSONObject(item.getItemTexts(Utils.getNativeLanguage().get(0))));
            }
            newItem.put("i18n", i18n);
            data.set(i, newItem);
        }
        // query
        JSONObject query = new JSONObject();
        if (PerunEntity.VIRTUAL_ORGANIZATION.equals(entity)) {
            query.put("vo", new JSONNumber(id));
        } else if (PerunEntity.GROUP.equals(entity)) {
            query.put("group", new JSONNumber(id));
        }
        query.put("items", data);
        return query;
    }
}
|
package com.elastisys.autoscaler.core.monitoring.systemhistorian.api.types;
import com.elastisys.autoscaler.core.api.types.MetricValue;
import com.elastisys.autoscaler.core.autoscaler.AutoScaler;
import com.elastisys.autoscaler.core.monitoring.systemhistorian.api.SystemHistorian;
import com.elastisys.autoscaler.core.prediction.api.PredictionSubsystem;
import com.elastisys.autoscaler.core.prediction.api.types.PredictionUnit;
import com.elastisys.autoscaler.core.prediction.impl.standard.api.Predictor;
import com.elastisys.scale.cloudpool.api.CloudPool;
import com.elastisys.scale.cloudpool.api.types.MachineState;
import com.elastisys.scale.commons.eventbus.EventBus;
/**
 * All {@link AutoScaler} system metrics tracked as time-series by the
 * {@link SystemHistorian}.
 * <p/>
 * Each value names the metric to use in {@link MetricValue}s that are reported
 * to the {@link SystemHistorian} via {@link SystemMetricEvent}s posted on the
 * {@link AutoScaler}'s {@link EventBus}. Use event tags to differentiate
 * reporting {@link AutoScaler} instances, {@link Predictor}s, metrics, etc.
 *
 * @see SystemMetricEvent
 */
public enum SystemMetric {
    /** Current load as approximated by a certain {@link Predictor}. */
    CURRENT_LOAD("autoscaler.current.load"),

    /** A capacity prediction made by a {@link Predictor}. */
    PREDICTION("autoscaler.prediction"),

    /** An aggregate prediction produced by the {@link PredictionSubsystem}. */
    AGGREGATE_PREDICTION("autoscaler.prediction.aggregate"),

    /** A bounded prediction produced by the {@link PredictionSubsystem}. */
    BOUNDED_PREDICTION("autoscaler.prediction.bounded"),

    /** A compute-unit prediction made during a resize iteration. */
    COMPUTE_UNIT_PREDICTION("autoscaler.prediction.computeunit"),

    /** The minimum capacity limit active at a certain point in time. */
    MIN_CAPACITY_LIMIT("autoscaler.capacity.limit.min"),

    /** The maximum capacity limit active at a certain point in time. */
    MAX_CAPACITY_LIMIT("autoscaler.capacity.limit.max"),

    /**
     * Current size of the managed {@link CloudPool}. One value is reported per
     * valid {@link MachineState}, carrying the number of pool members in that
     * state; the state itself is given by the {@code machineState} tag.
     */
    CLOUDPOOL_SIZE("autoscaler.cloudpool.size"),

    /** Signals that the size of the managed {@link CloudPool} has changed. */
    CLOUDPOOL_SIZE_CHANGED("autoscaler.cloudpool.size.changed");

    /** Wire name of the metric, as reported to the {@link SystemHistorian}. */
    private final String name;

    SystemMetric(String name) {
        this.name = name;
    }

    /**
     * Returns the textual representation of this {@link SystemMetric}.
     *
     * @return the metric name
     */
    public String getMetricName() {
        return this.name;
    }
}
|
package ufc.rest.response;
/**
 * Response message carrying the authentication token issued to a user.
 */
public class UserAuthenticationResponse extends ResponseMessage {
    private String token;

    /** No-arg constructor, presumably for serialization frameworks — confirm. */
    protected UserAuthenticationResponse() {
    }

    /**
     * Creates a response wrapping the given token.
     *
     * @param token the issued authentication token
     */
    public UserAuthenticationResponse(String token) {
        this.token = token;
    }

    /** Returns the issued authentication token. */
    public String getToken() {
        return token;
    }

    /** Sets the token carried by this response. */
    public void setToken(String token) {
        this.token = token;
    }
}
|
package org.jgroups.demos;
import org.jgroups.JChannel;
import org.jgroups.Receiver;
import org.jgroups.View;
import org.jgroups.util.Util;
/**
 * Demos the reception of views. Just start a number of members, and kill them
 * randomly. The view should always be correct.
 */
public class ViewDemo implements Receiver {
    private JChannel channel;

    /** Logs every new view installed in the group. */
    public void viewAccepted(View new_view) {
        System.out.println("** New view: " + new_view);
    }

    /**
     * Connects to the "ViewDemo" cluster using the given protocol stack and
     * blocks forever, printing views as they arrive.
     *
     * @param props protocol stack configuration
     * @throws Exception if the channel cannot be created or connected
     */
    public void start(String props) throws Exception {
        channel = new JChannel(props);
        channel.setReceiver(this);
        channel.connect("ViewDemo");
        // Deliberate demo idle loop: views are printed from the receiver callback.
        while (true) {
            Util.sleep(10000);
        }
    }

    public static void main(String[] args) {
        ViewDemo t = new ViewDemo();
        String props = "udp.xml";
        for (int i = 0; i < args.length; i++) {
            if ("-help".equals(args[i])) {
                help();
                return;
            }
            if ("-props".equals(args[i])) {
                props = args[++i];
                continue;
            }
            // Unknown argument: print usage and exit.
            help();
            return;
        }
        try {
            t.start(props);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Prints usage. Fixed: the previous text also advertised a
     * -use_additional_data flag that main() never handled.
     */
    static void help() {
        System.out.println("ViewDemo [-props <properties>] [-help]");
    }
}
|
package org.docksidestage.hangar.dbflute.exentity.customize;
import org.docksidestage.hangar.dbflute.bsentity.customize.BsForcedType;
/**
 * The entity of ForcedType.
 * <p>
 * You can implement your original methods here.
 * This class remains when re-generating (only the Bs* base class is
 * overwritten by the DBFlute generator).
 * </p>
 * @author DBFlute(AutoGenerator)
 */
public class ForcedType extends BsForcedType {
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;
}
|
package it.blackhat.symposium.actions.tag;
import it.blackhat.symposium.actions.Action;
import it.blackhat.symposium.managers.TagManager;
import it.blackhat.symposium.managers.TagModelManager;
import it.blackhat.symposium.models.Tag;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;
/**
 * This action finds all tags of a question.
 */
public class RetrieveQuestionTagsAction implements Action {

    private final TagManager tagManager;
    private final Log retrieveQuestionTagsLog = LogFactory.getLog(RetrieveQuestionTagsAction.class);

    /**
     * Create the action.
     * @param ds the datasource object
     */
    public RetrieveQuestionTagsAction(DataSource ds) {
        super();
        this.tagManager = new TagModelManager(ds);
    }

    /**
     * Looks up the tags of the question identified by the "questionId" request
     * parameter and exposes them as request attributes.
     *
     * @return "/editTag.jsp" on success, "/error500.jsp" on failure
     */
    @Override
    public String execute(HttpServletRequest req, HttpServletResponse res) {
        try {
            String questionId = req.getParameter("questionId");
            // parseInt throws NumberFormatException on a missing or malformed
            // parameter; previously that escaped as an unhandled runtime
            // exception instead of reaching the error page below.
            int questionIdInt = Integer.parseInt(questionId);
            List<Tag> tags = tagManager.retrieveQuestionTags(questionIdInt);
            req.setAttribute("tags", tags);
            req.setAttribute("questionId", questionIdInt);
            return "/editTag.jsp";
        } catch (SQLException | NumberFormatException e) {
            retrieveQuestionTagsLog.error("Errore interno", e);
            return "/error500.jsp";
        }
    }
}
|
/*
* $Header$
* $Revision$
* $Date$
*
* ====================================================================
*
* Copyright 2000-2002 bob mcwhirter & James Strachan.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the Jaxen Project nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* ====================================================================
* This software consists of voluntary contributions made by many
* individuals on behalf of the Jaxen Project and was originally
* created by bob mcwhirter <bob@werken.com> and
* James Strachan <jstrachan@apache.org>. For more information on the
* Jaxen Project, please see <http://www.jaxen.org/>.
*
* $Id$
*/
package org.jaxen.function;
import java.util.List;
import org.jaxen.Context;
import org.jaxen.Function;
import org.jaxen.FunctionCallException;
import org.jaxen.Navigator;
/**
* <p><b>4.4</b> <code><i>number</i> ceiling(<i>number</i>)</code></p>
* <blockquote cite="http://www.w3.org/TR/xpath">
*
* <p>The ceiling function returns the smallest
* (closest to negative infinity) number that is not less
* than the argument and that is an integer....If the argument
* is NaN, then NaN is returned. If the argument is positive infinity,
* then positive infinity is returned. If the argument is negative infinity,
* then negative infinity is returned. If the argument is positive zero,
* then positive zero is returned.
* If the argument is negative zero, then negative zero is returned.
* If the argument is less than zero, but greater than -1,
* then negative zero is returned.
* </p></blockquote>
*
* @author bob mcwhirter (bob @ werken.com)
*
* @see <a href="https://www.w3.org/TR/xpath#function-ceiling">Section 4.4 of the XPath Specification</a>
* @see <a href="https://www.w3.org/1999/11/REC-xpath-19991116-errata/">XPath Specification Errata</a>
*
*/
public class CeilingFunction implements Function
{
    /**
     * Create a new <code>CeilingFunction</code> object.
     */
    public CeilingFunction() {}

    /** Returns the smallest integer greater than or equal to a number.
     *
     * @param context the context at the point in the
     *     expression when the function is called
     * @param args a list with exactly one item which will be converted to a
     *     <code>Double</code> as if by the XPath <code>number()</code> function
     *
     * @return a <code>Double</code> containing the smallest integer greater than or equal
     *     <code>args.get(0)</code>
     *
     * @throws FunctionCallException if <code>args</code> has more or less than one item
     */
    public Object call(Context context,
                       List args) throws FunctionCallException
    {
        // Guard clause first: reject anything but exactly one argument.
        if (args.size() != 1)
        {
            throw new FunctionCallException("ceiling() requires one argument.");
        }
        return evaluate( args.get(0),
                         context.getNavigator() );
    }

    /** Returns the smallest integer greater than or equal to the argument.
     * If necessary, the argument is first converted to a <code>Double</code>
     * as if by the XPath <code>number()</code> function.
     *
     * @param obj the object whose ceiling is returned
     * @param nav ignored
     *
     * @return a <code>Double</code> containing the smallest integer
     *     greater than or equal to <code>obj</code>
     */
    public static Double evaluate(Object obj,
                                  Navigator nav)
    {
        Double value = NumberFunction.evaluate( obj,
                                                nav );
        // Double.valueOf replaces the deprecated Double(double) constructor;
        // boxing semantics are identical.
        return Double.valueOf( Math.ceil( value.doubleValue() ) );
    }
}
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.bgpio.protocol;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.junit.Test;
import org.onosproject.bgpio.exceptions.BgpParseException;
import org.onosproject.bgpio.types.BgpHeader;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
/**
* Test cases for BGP Open Message.
*/
public class BgpOpenMsgTest {
/**
 * This test case checks an open message carrying no optional parameters.
 */
@Test
public void openMessageTest1() throws BgpParseException {
    // OPEN message without optional parameters: 16-byte marker, length 0x1d,
    // type 1 (OPEN), version 4, AS, hold time, BGP identifier, opt-len 0.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
        0x00, 0x1d, 0x01, 0X04, (byte) 0xfe, 0x09, 0x00,
        (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f,
        0x00};

    // Decode the raw bytes into a BGP message.
    ChannelBuffer in = ChannelBuffers.dynamicBuffer();
    in.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message = reader.readFrom(in, new BgpHeader());
    assertThat(message, instanceOf(BgpOpenMsg.class));

    // Re-encode and verify the round trip reproduces the original bytes.
    ChannelBuffer out = ChannelBuffers.dynamicBuffer();
    message.writeTo(out);
    int encodedLen = out.writerIndex();
    byte[] encoded = new byte[encodedLen];
    out.readBytes(encoded, 0, encodedLen);
    assertThat(encoded, is(openMsg));
}
/**
 * This test case checks open message with Multiprotocol extension
 * capability: decodes the fixture bytes and verifies the re-encoded
 * message round-trips to the identical byte sequence.
 */
@Test
public void openMessageTest2() throws BgpParseException {
    // OPEN Message (MultiProtocolExtension-CAPABILITY).
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, 0x00, 0x25,
        0x01, //BGP Header
        0X04, //Version
        (byte) 0x00, (byte) 0xc8, // AS Number
        0x00, (byte) 0xb4, // Hold time
        (byte) 0xb6, (byte) 0x02, 0x5d,
        (byte) 0xc8, // BGP Identifier
        0x08, 0x02, 0x06, // Opt Parameter length
        0x01, 0x04, 0x00, 0x00, 0x00, (byte) 0xc8}; // Multiprotocol CAPABILITY
    byte[] testOpenMsg;
    // Decode the fixture bytes.
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
    // Re-encode and compare against the original bytes (round-trip check).
    ChannelBuffer buf = ChannelBuffers.dynamicBuffer();
    message.writeTo(buf);
    int readLen = buf.writerIndex();
    testOpenMsg = new byte[readLen];
    buf.readBytes(testOpenMsg, 0, readLen);
    assertThat(testOpenMsg, is(openMsg));
}
/**
 * This test case checks open message with Four-octet AS number
 * capability: decodes the fixture bytes and verifies the re-encoded
 * message round-trips to the identical byte sequence.
 */
@Test
public void openMessageTest3() throws BgpParseException {
    // OPEN Message (Four-Octet AS number capability).
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, (byte) 0xff, (byte) 0xff,
        (byte) 0xff, 0x00, 0x25,
        0x01, //BGPHeader
        0X04, //Version
        (byte) 0x00, (byte) 0xc8, //AS Number
        0x00, (byte) 0xb4, //Hold Time
        (byte) 0xb6, (byte) 0x02, 0x5d,
        (byte) 0xc8, //BGP Identifier
        0x08, 0x02, 0x06, //Opt Parameter Length
        0x41, 0x04, 0x00, 0x01, 0x00, 0x01}; //Four Octet AS Number-CAPABILITY-TLV
    byte[] testOpenMsg;
    // Decode the fixture bytes.
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
    // Re-encode and compare against the original bytes (round-trip check).
    ChannelBuffer buf = ChannelBuffers.dynamicBuffer();
    message.writeTo(buf);
    int readLen = buf.writerIndex();
    testOpenMsg = new byte[readLen];
    buf.readBytes(testOpenMsg, 0, readLen);
    assertThat(testOpenMsg, is(openMsg));
}
/**
 * This test case checks open message with capabilities
 * (multiprotocol extension plus four-octet AS number).
 */
@Test
public void openMessageTest4() throws BgpParseException {
    // OPEN Message with capabilities.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, 0x00, 0x2b, // 16-byte marker, total length (0x2b = 43)
                                 0x01, // message type: OPEN
                                 0X04, // version
                                 (byte) 0x00, (byte) 0xc8, // AS Number (200)
                                 0x00, (byte) 0xb4, // Hold Time (180 s)
                                 (byte) 0xb6, (byte) 0x02, 0x5d, (byte) 0xc8, // BGP Identifier
                                 0x0e, 0x02, 0x0c, // Opt Parameter Length
                                 0x01, 0x04, 0x00, 0x00, 0x00, (byte) 0xc8, // Multiprotocol extension capability
                                 0x41, 0x04, 0x00, 0x01, 0x00, 0x01}; // Four Octet AS Number-CAPABILITY-TLV
    byte[] testOpenMsg;
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decode the raw OPEN message.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
    // Re-encode and verify the round trip reproduces the original bytes.
    ChannelBuffer buf = ChannelBuffers.dynamicBuffer();
    message.writeTo(buf);
    int readLen = buf.writerIndex();
    testOpenMsg = new byte[readLen];
    buf.readBytes(testOpenMsg, 0, readLen);
    assertThat(testOpenMsg, is(openMsg));
}
/**
 * In this test case, Invalid version is given as input and expecting
 * an exception.
 */
@Test(expected = BgpParseException.class)
public void openMessageTest5() throws BgpParseException {
    // OPEN Message with invalid version number: the version byte is 0X05,
    // while the valid messages in the other tests carry 0x04.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, 0x00, 0x1d, 0x01, 0X05,
                                 (byte) 0xfe, 0x09, 0x00, (byte) 0xb4,
                                 (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f,
                                 0x00};
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decoding is expected to throw; the assertion below is never reached.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
}
/**
 * In this test case, Marker is set as 0 in input and expecting
 * an exception.
 */
@Test(expected = BgpParseException.class)
public void openMessageTest6() throws BgpParseException {
    // OPEN Message with a corrupted marker: bytes 10 and 16 of the
    // 16-byte marker are 0x00 instead of 0xff.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0x00, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 0x00, 0x00, 0x1d, 0x01, 0X04,
                                 (byte) 0xfe, 0x09, 0x00, (byte) 0xb4,
                                 (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f,
                                 0x00};
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decoding is expected to throw; the assertion below is never reached.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
}
/**
 * In this test case, Invalid message length is given as input and expecting
 * an exception.
 */
@Test(expected = BgpParseException.class)
public void openMessageTest7() throws BgpParseException {
    // OPEN Message with invalid header length: the length field claims
    // 0x1e (30) bytes while the buffer actually holds 29.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, 0x00, 0x1e, 0x01, 0X04,
                                 (byte) 0xfe, 0x09, 0x00, (byte) 0xb4,
                                 (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f,
                                 0x00};
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decoding is expected to throw; the assertion below is never reached.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
}
/**
 * In this test case, Invalid message type is given as input and expecting
 * an exception.
 */
@Test(expected = BgpParseException.class)
public void openMessageTest8() throws BgpParseException {
    // OPEN Message with invalid message type: the type byte is 0x05,
    // while the OPEN messages in the other tests use 0x01.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, 0x00, 0x1d, 0x05, 0X04,
                                 (byte) 0xfe, 0x09, 0x00, (byte) 0xb4, (byte) 0xc0, (byte) 0xa8, 0x00, 0x0f, 0x00 };
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decoding is expected to throw; the assertion below is never reached.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
}
/**
 * This test case checks open message with route policy distribution
 * capability (capability code 0x81) alongside the other capabilities.
 */
@Test
public void openMessageTest9() throws BgpParseException {
    // OPEN Message with capabilities; the last TLV (0x81) is the RPD capability.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 0x00, 0x3d, 0x01, 0x04, 0x00, (byte) 0xc8, 0x00, (byte) 0xb4, (byte) 0xc0,
                                 (byte) 0xa8, 0x07, 0x35, 0x20, 0x02, 0x1e, 0x01,
                                 0x04, 00, 0x01, 0x00, 0x01, 0x41, 0x04, 0x00, 0x00, 0x00, (byte) 0xc8, 0x01,
                                 0x04, 0x40, 0x04, 0x00, 0x47, 0x01, 0x04, 0x00, 0x01, 0x00, (byte) 0x85,
                                 (byte) 0x81, 0x04, 0x00, 0x01, (byte) 0x85, 0x01 }; //RPD capability
    byte[] testOpenMsg;
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decode the raw OPEN message.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
    // Re-encode and verify the round trip reproduces the original bytes.
    ChannelBuffer buf = ChannelBuffers.dynamicBuffer();
    message.writeTo(buf);
    int readLen = buf.writerIndex();
    testOpenMsg = new byte[readLen];
    buf.readBytes(testOpenMsg, 0, readLen);
    assertThat(testOpenMsg, is(openMsg));
}
/**
 * In this test case, Invalid multiprotocol capability length is given as input and expecting an exception.
 */
@Test(expected = BgpParseException.class)
public void openMessageTest10() throws BgpParseException {
    // OPEN Message whose trailing RPD capability TLV declares length 0x05
    // while only 4 bytes of value follow (compare openMessageTest9, which
    // carries 0x81, 0x04), so parsing must fail.
    byte[] openMsg = new byte[] {(byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 (byte) 0xff, (byte) 0xff, (byte) 0xff, (byte) 0xff,
                                 0x00, 0x3d, 0x01, 0x04, 0x00, (byte) 0xc8, 0x00, (byte) 0xb4, (byte) 0xc0,
                                 (byte) 0xa8, 0x07, 0x35, 0x20, 0x02, 0x1e, 0x01, 0x04, 00, 0x01, 0x00, 0x01, 0x41,
                                 0x04, 0x00, 0x00, 0x00, (byte) 0xc8, 0x01, 0x04, 0x40, 0x04, 0x00,
                                 0x47, 0x01, 0x04, 0x00, 0x01, 0x00, (byte) 0x85,
                                 (byte) 0x81, 0x05, 0x00, 0x01, (byte) 0x85, 0x01 }; //RPD capability with bad length
    ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
    buffer.writeBytes(openMsg);
    BgpMessageReader<BgpMessage> reader = BgpFactories.getGenericReader();
    BgpMessage message;
    BgpHeader bgpHeader = new BgpHeader();
    // Decoding is expected to throw; the assertion below is never reached.
    message = reader.readFrom(buffer, bgpHeader);
    assertThat(message, instanceOf(BgpOpenMsg.class));
}
}
|
package com.captstudios.games.tafl.core.es.model.rules;
import com.badlogic.gdx.utils.Array;
import com.captstudios.games.tafl.core.enums.DrawReasonEnum;
import com.captstudios.games.tafl.core.es.model.TaflMatchObserver;
import com.captstudios.games.tafl.core.es.model.ai.optimization.BitBoard;
import com.captstudios.games.tafl.core.es.model.ai.optimization.moves.Move;
import com.captstudios.games.tafl.core.es.model.ai.optimization.moves.RulesChecker;
public interface RulesEngine extends TaflMatchObserver, RulesChecker {

    /** @return the team that takes the first turn of a match. */
    public int getFirstTurn();

    /** @return a bit board of the pieces captured by {@code move} — presumably computed, not cached; confirm per implementation. */
    public BitBoard getCapturedPieces(Move move);

    /** @return the winning team, or whatever sentinel the implementation uses when there is no winner yet — TODO confirm the no-winner value. */
    public int checkWinner();

    /**
     * @param team the team whose position is examined.
     * @return the draw reason if the position is a draw for {@code team}, otherwise presumably {@code null} — confirm.
     */
    public DrawReasonEnum checkDraw(int team);

    /**
     * @param team the moving team.
     * @param source the cell id the piece moves from.
     * @return a bit board of the destinations legally reachable from {@code source}.
     */
    public BitBoard getLegalMoves(int team, int source);

    /** @return every legal move currently available to {@code team}. */
    public Array<Move> allLegalMoves(int team);

    /** @return whether moving from {@code source} to {@code destination} is legal for {@code team}. */
    public boolean isMoveLegal(int team, int source, int destination);

    /** @return whether the piece of {@code team} on {@code cellId} is vulnerable (capturable) — exact rule is implementation-defined. */
    public boolean isVulnerable(int team, int cellId);

    /** @return whether any piece of {@code team} can move onto {@code cellId}. */
    public boolean teamCanMoveToLocation(int team, int cellId);
}
|
package com.knits.kncare.controller;
import com.fasterxml.jackson.annotation.JsonView;
import com.knits.kncare.dto.EmailDto;
import com.knits.kncare.dto.MemberDto;
import com.knits.kncare.dto.Views;
import com.knits.kncare.dto.search.MemberSearchDto;
import com.knits.kncare.model.Member;
import com.knits.kncare.repository.EmployeeRepository;
import com.knits.kncare.service.MemberService;
import io.swagger.v3.oas.annotations.Operation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@RequestMapping("/api/v1/members")
@RestController
@JsonView(Views.Common.class)
@Slf4j
public class MemberController {

    private final MemberService service;
    // NOTE(review): injected but not referenced by any handler below; kept so the
    // constructor signature (and DI wiring) stays unchanged — confirm whether it can go.
    private final EmployeeRepository repository;

    @Autowired
    public MemberController(MemberService service, EmployeeRepository repository) {
        this.service = service;
        this.repository = repository;
    }

    /** Returns a single care member by primary key. */
    @Operation(summary = "find a care member by id")
    @GetMapping("{id}")
    @JsonView(Views.MemberDetails.class)
    public ResponseEntity<MemberDto> getMemberById(@PathVariable("id") long id) {
        return new ResponseEntity<>(service.getById(id), HttpStatus.OK);
    }

    /** Creates a care member; 201 on success, 500 on any failure. */
    @Operation(summary = "create a care member")
    @PostMapping
    @JsonView(Views.MemberDetails.class)
    public ResponseEntity<MemberDto> createMember(@RequestBody MemberDto memberDto) {
        try {
            return new ResponseEntity<>(service.add(memberDto), HttpStatus.CREATED);
        } catch (Exception e) {
            // Log the exception itself (with stack trace), not just its message,
            // so creation failures are diagnosable.
            log.error("Failed to create member", e);
            return new ResponseEntity<>(null, HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /** Updates an existing care member in place. */
    @Operation(summary = "update a Member")
    @PutMapping("{id}")
    public ResponseEntity<MemberDto> updateMember(@PathVariable("id") long id, @RequestBody MemberDto memberDto) {
        return new ResponseEntity<>(service.update(id, memberDto), HttpStatus.OK);
    }

    /** Deletes one care member; 204 on success, 500 on failure. */
    @Operation(summary = "delete a care member by id")
    @DeleteMapping("{id}")
    public ResponseEntity<HttpStatus> deleteMember(@PathVariable("id") long id) {
        try {
            service.delete(id);
            return new ResponseEntity<>(HttpStatus.NO_CONTENT);
        } catch (Exception e) {
            // Previously swallowed silently; log so the 500 is traceable.
            log.error("Failed to delete member with id {}", id, e);
            return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /** Deletes every care member; 204 on success, 500 on failure. */
    @Operation(summary = "delete all care members")
    @DeleteMapping
    public ResponseEntity<HttpStatus> deleteAllMembers() {
        try {
            service.deleteAll();
            return new ResponseEntity<>(HttpStatus.NO_CONTENT);
        } catch (Exception e) {
            // Previously swallowed silently; log so the 500 is traceable.
            log.error("Failed to delete all members", e);
            return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }

    /** Pages over care members matching the given search criteria. */
    @Operation(summary = "find care members by one of its model fields")
    @GetMapping
    @JsonView(Views.Common.class)
    public ResponseEntity<Page<MemberDto>> searchMembers(MemberSearchDto memberSearchDto) {
        try {
            return new ResponseEntity<>(service.search(memberSearchDto), HttpStatus.OK);
        } catch (Exception e) {
            // Previously swallowed silently; log so the 500 is traceable.
            log.error("Member search failed", e);
            return new ResponseEntity<>(null, HttpStatus.INTERNAL_SERVER_ERROR);
        }
    }
}
|
package cn.hutool.json;
import java.io.Serializable;
/**
* JSON配置项
*
* @author looly
* @since 4.1.19
*/
/**
 * JSON configuration options.
 *
 * @author looly
 * @since 4.1.19
 */
public class JSONConfig implements Serializable {
    private static final long serialVersionUID = 119730355204738278L;

    /**
     * Whether entries are ordered, following insertion order.
     */
    private boolean order;
    /**
     * Whether exceptions raised during conversion are ignored.
     */
    private boolean ignoreError;
    /**
     * Whether key lookups ignore case.
     */
    private boolean ignoreCase;
    /**
     * Date format pattern; {@code null} means the default timestamp representation.
     */
    private String dateFormat;
    /**
     * Whether {@code null} values are omitted.
     */
    private boolean ignoreNullValue = true;
    /**
     * Whether the {@code transient} keyword and the {@code @Transient} annotation are honoured;
     * if supported, fields so marked (or the fields behind marked methods) are skipped.
     */
    private boolean transientSupport = true;
    /**
     * Whether superfluous trailing zeros are stripped; e.g. when {@code true}, 5.0 is emitted as 5.
     */
    private boolean stripTrailingZeros = true;

    /**
     * Creates a configuration with default settings.
     *
     * @return JSONConfig
     */
    public static JSONConfig create() {
        return new JSONConfig();
    }

    /**
     * Whether entries are ordered, following insertion order.
     *
     * @return {@code true} if ordered
     */
    public boolean isOrder() {
        return order;
    }

    /**
     * Sets whether entries are ordered, following insertion order.
     *
     * @param order whether to keep insertion order
     * @return this
     */
    public JSONConfig setOrder(boolean order) {
        this.order = order;
        return this;
    }

    /**
     * Whether exceptions raised during conversion are ignored.
     *
     * @return whether conversion exceptions are ignored
     */
    public boolean isIgnoreError() {
        return ignoreError;
    }

    /**
     * Sets whether exceptions raised during conversion are ignored.
     *
     * @param ignoreError whether to ignore conversion exceptions
     * @return this
     */
    public JSONConfig setIgnoreError(boolean ignoreError) {
        this.ignoreError = ignoreError;
        return this;
    }

    /**
     * Whether key lookups ignore case.
     *
     * @return whether key case is ignored
     */
    public boolean isIgnoreCase() {
        return ignoreCase;
    }

    /**
     * Sets whether key lookups ignore case.
     *
     * @param ignoreCase whether to ignore key case
     * @return this
     */
    public JSONConfig setIgnoreCase(boolean ignoreCase) {
        this.ignoreCase = ignoreCase;
        return this;
    }

    /**
     * Date format pattern; {@code null} means the default timestamp representation.
     *
     * @return the date format, or {@code null} for the default timestamp
     */
    public String getDateFormat() {
        return dateFormat;
    }

    /**
     * Sets the date format; {@code null} means the default timestamp representation.<br>
     * The format set here only affects serialization to a JSON string; it has no
     * effect when parsing JSON into a bean.
     *
     * @param dateFormat the date format, or {@code null} for the default timestamp
     * @return this
     */
    public JSONConfig setDateFormat(String dateFormat) {
        this.dateFormat = dateFormat;
        return this;
    }

    /**
     * Whether {@code null} values are omitted.
     *
     * @return whether {@code null} values are omitted
     */
    public boolean isIgnoreNullValue() {
        return this.ignoreNullValue;
    }

    /**
     * Sets whether {@code null} values are omitted.
     *
     * @param ignoreNullValue whether to omit {@code null} values
     * @return this
     */
    public JSONConfig setIgnoreNullValue(boolean ignoreNullValue) {
        this.ignoreNullValue = ignoreNullValue;
        return this;
    }

    /**
     * Whether the {@code transient} keyword and the {@code @Transient} annotation are honoured;
     * if supported, fields so marked (or the fields behind marked methods) are skipped.
     *
     * @return whether transient is supported
     * @since 5.4.2
     */
    public boolean isTransientSupport() {
        return this.transientSupport;
    }

    /**
     * Sets whether the {@code transient} keyword and the {@code @Transient} annotation are honoured;
     * if supported, fields so marked (or the fields behind marked methods) are skipped.
     *
     * @param transientSupport whether to support transient
     * @return this
     * @since 5.4.2
     */
    public JSONConfig setTransientSupport(boolean transientSupport) {
        this.transientSupport = transientSupport;
        return this;
    }

    /**
     * Whether superfluous trailing zeros are stripped; e.g. when {@code true}, 5.0 is emitted as 5.
     *
     * @return whether trailing zeros are stripped
     * @since 5.6.2
     */
    public boolean isStripTrailingZeros() {
        return stripTrailingZeros;
    }

    /**
     * Sets whether superfluous trailing zeros are stripped; e.g. when {@code true}, 5.0 is emitted as 5.
     *
     * @param stripTrailingZeros whether to strip trailing zeros
     * @return this
     * @since 5.6.2
     */
    public JSONConfig setStripTrailingZeros(boolean stripTrailingZeros) {
        this.stripTrailingZeros = stripTrailingZeros;
        return this;
    }
}
|
package frontend.slogo.team04;
import javafx.beans.property.BooleanProperty;
import javafx.scene.Node;
import javafx.scene.control.Button;
import javafx.scene.control.ContentDisplay;
/**
* Created data structure to create a Toggle (works closely with Boolean Property)
* @author Ryan St Pierre
*/
public class ToggleButton extends Button {

    // Backing property; flipped on each click when constructed with the property ctor.
    BooleanProperty value;

    /**
     * Builds a toggle bound to {@code value}: clicking flips the property and
     * the visual state is refreshed immediately.
     */
    public ToggleButton(BooleanProperty value) {
        super();
        this.value = value;
        this.setOnMouseClicked(e -> toggle());
        setState();
    }

    /** Plain text button; no property is wired and no toggle behavior is installed. */
    public ToggleButton(String text) {
        super(text);
    }

    /** Text-plus-graphic button; no property is wired and no toggle behavior is installed. */
    public ToggleButton(String text, Node graphic) {
        super(text, graphic);
    }

    // Flip the property, then redraw.
    private void toggle() {
        changeValue();
        setState();
    }

    // Render the ON/OFF appearance from the current property value.
    private void setState() {
        boolean on = value.get();
        setText(on ? "ON" : "OFF");
        setStyle(on
                ? "-fx-background-color: green;-fx-text-fill:white;"
                : "-fx-background-color: grey;-fx-text-fill:black;");
        setContentDisplay(on ? ContentDisplay.LEFT : ContentDisplay.RIGHT);
    }

    // Invert the backing property.
    private void changeValue() {
        value.set(!value.get());
    }
}
|
package org.batfish.common.plugin;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.SortedMap;
import org.batfish.common.traceroute.TraceDag;
import org.batfish.common.util.CollectionUtil;
import org.batfish.datamodel.Flow;
import org.batfish.datamodel.flow.FirewallSessionTraceInfo;
import org.batfish.datamodel.flow.Trace;
import org.batfish.datamodel.flow.TraceAndReverseFlow;
/**
* Indicates ability to process a set of {@link Flow} objects and return a set of {@link Trace},
* performing a traceroute.
*/
public interface TracerouteEngine {

    /**
     * Builds the {@link Trace}s for a {@link Set} of {@link Flow}s.
     *
     * @param flows {@link Set} of {@link Flow} for which {@link Trace}s are to be found
     * @param ignoreFilters if true, will ignore ACLs
     * @return {@link SortedMap} of {@link Flow}s to {@link List} of {@link Trace}s
     */
    default SortedMap<Flow, List<Trace>> computeTraces(Set<Flow> flows, boolean ignoreFilters) {
        // Delegate to the reverse-flow variant and keep only the forward traces.
        return CollectionUtil.toImmutableSortedMap(
            computeTracesAndReverseFlows(flows, ignoreFilters),
            Entry::getKey,
            entry ->
                entry.getValue().stream()
                    .map(TraceAndReverseFlow::getTrace)
                    .collect(ImmutableList.toImmutableList()));
    }

    /**
     * Computes {@link Trace Traces} with reverse-direction {@link Flow Flows} for a {@link Set} of
     * forward {@link Flow Flows}, starting with no pre-established firewall sessions.
     *
     * @param flows {@link Set} of {@link Flow} for which {@link Trace Traces} are to be found
     * @param ignoreFilters if true, will ignore ACLs
     * @return {@link SortedMap} of {@link Flow Flows} to {@link List} of {@link Trace Traces}
     */
    default SortedMap<Flow, List<TraceAndReverseFlow>> computeTracesAndReverseFlows(
        Set<Flow> flows, boolean ignoreFilters) {
        // Empty session set: no firewall sessions are assumed established.
        return computeTracesAndReverseFlows(flows, ImmutableSet.of(), ignoreFilters);
    }

    /**
     * Computes {@link Trace Traces} with reverse-direction {@link Flow Flows} for a {@link Set} of
     * forward {@link Flow Flows}.
     *
     * @param flows {@link Set} of {@link Flow} for which {@link Trace Traces} are to be found
     * @param sessions firewall sessions assumed to be already established during the traceroute
     * @param ignoreFilters if true, will ignore ACLs
     * @return {@link SortedMap} of {@link Flow Flows} to {@link List} of {@link Trace Traces}
     */
    SortedMap<Flow, List<TraceAndReverseFlow>> computeTracesAndReverseFlows(
        Set<Flow> flows, Set<FirewallSessionTraceInfo> sessions, boolean ignoreFilters);

    /**
     * Computes a {@link TraceDag} for each of a {@link Set} of forward {@link Flow Flows}.
     *
     * @param flows {@link Set} of {@link Flow} for which DAGs are to be computed
     * @param sessions firewall sessions assumed to be already established during the traceroute
     * @param ignoreFilters if true, will ignore ACLs
     * @return map of {@link Flow Flows} to their {@link TraceDag}
     */
    Map<Flow, TraceDag> computeTraceDags(
        Set<Flow> flows, Set<FirewallSessionTraceInfo> sessions, boolean ignoreFilters);
}
|
package com.central.gateway.filter.pre;
import com.central.common.utils.AddrUtil;
import com.central.log.monitor.PointUtil;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import eu.bitwalker.useragentutils.UserAgent;
import lombok.extern.slf4j.Slf4j;
import org.springframework.cloud.netflix.zuul.filters.support.FilterConstants;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
/**
* 请求统计分析埋点过滤器
*
* @author zlt
* @date 2019/5/6
*/
@Slf4j
@Component
public class RequestStatisticsFilter extends ZuulFilter {

    /** Runs in the pre-routing phase, before the request is forwarded. */
    @Override
    public String filterType() {
        return FilterConstants.PRE_TYPE;
    }

    /** Highest priority within the pre phase. */
    @Override
    public int filterOrder() {
        return 0;
    }

    /** Statistics are collected for every request. */
    @Override
    public boolean shouldFilter() {
        return true;
    }

    /**
     * Emits a tracking point carrying the client ip, browser and operating
     * system derived from the request's User-Agent header.
     */
    @Override
    public Object run() {
        HttpServletRequest request = RequestContext.getCurrentContext().getRequest();
        UserAgent userAgent = UserAgent.parseUserAgentString(request.getHeader("User-Agent"));
        String detail = "ip=" + AddrUtil.getRemoteAddr(request)
                + "&browser=" + userAgent.getBrowser()
                + "&operatingSystem=" + userAgent.getOperatingSystem();
        // Instrumentation point for request statistics.
        PointUtil.debug("0", "request-statistics", detail);
        return null;
    }
}
|
package com.xian.mall.product.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.xian.common.utils.PageUtils;
import com.xian.mall.product.entity.SkuInfoEntity;
import java.util.Map;
/**
* sku信息
*
* @author lishouxian
* @email li.shouxian@outlook.com
* @date 2020-09-15 19:22:23
*/
public interface SkuInfoService extends IService<SkuInfoEntity> {

    /**
     * Pages over sku records using the given query parameters.
     *
     * @param params query/paging parameters (keys defined by the caller — confirm against PageUtils usage)
     * @return the requested page wrapped in {@link PageUtils}
     */
    PageUtils queryPage(Map<String, Object> params);

    /**
     * Persists a sku record.
     *
     * @param skuInfoEntity the sku to save
     */
    void saveSkuInfo(SkuInfoEntity skuInfoEntity);
}
|
//,temp,JournalCorruptionExceptionTest.java,144,171,temp,MKahaDBTxRecoveryTest.java,468,500
//,3
public class xxx {

    /**
     * Corrupts the 3rd batch control record of the broker's mKahaDB tx-store
     * journal (db-1.log) by overwriting two bytes with garbage, optionally
     * truncating the file immediately after the corrupted bytes.
     *
     * @param pathToDataDir path to the broker data directory containing mKahaDB
     * @param truncate      if true, cut the file off right after the corruption
     * @throws Exception on any I/O failure
     */
    private void corruptTxStore(String pathToDataDir, boolean truncate) throws Exception {
        LOG.info("Path to broker datadir: " + pathToDataDir);
        // try-with-resources: the original implementation leaked the file handle
        // on every call (RandomAccessFile was never closed).
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(
                String.format("%s/mKahaDB/txStore/db-1.log", pathToDataDir), "rw")) {
            final ByteSequence header = new ByteSequence(Journal.BATCH_CONTROL_RECORD_HEADER);
            byte data[] = new byte[1024 * 20];
            ByteSequence bs = new ByteSequence(data, 0, randomAccessFile.read(data, 0, data.length));
            // Locate the 3rd occurrence of the batch control record header.
            int offset = bs.indexOf(header, 1);
            offset = bs.indexOf(header, offset + 1);
            offset = bs.indexOf(header, offset + 1);
            // 3rd batch
            LOG.info("3rd batch record in file: 1:" + offset);
            offset += Journal.BATCH_CONTROL_RECORD_SIZE;
            offset += 4; // location size
            offset += 1; // location type
            byte fill = (byte) 0xAF;
            LOG.info("Whacking batch record in file:" + 1 + ", at offset: " + offset + " with fill:" + fill);
            // whack that record: overwrite two bytes with the fill value
            byte[] bla = new byte[2];
            Arrays.fill(bla, fill);
            randomAccessFile.seek(offset);
            randomAccessFile.write(bla, 0, bla.length);
            if (truncate) {
                // set length to truncate — presumably simulating a partially
                // written journal; confirm against the recovery tests using this.
                randomAccessFile.setLength(randomAccessFile.getFilePointer());
            }
            // Flush the corruption to disk before the handle is closed.
            randomAccessFile.getFD().sync();
        }
    }
};
|
package net.sf.l2j.gameserver.model.group;
import java.util.List;
import net.sf.l2j.commons.random.Rnd;
import net.sf.l2j.gameserver.model.WorldObject;
import net.sf.l2j.gameserver.model.actor.instance.Player;
import net.sf.l2j.gameserver.network.SystemMessageId;
import net.sf.l2j.gameserver.network.serverpackets.CreatureSay;
import net.sf.l2j.gameserver.network.serverpackets.L2GameServerPacket;
import net.sf.l2j.gameserver.network.serverpackets.SystemMessage;
/**
 * Base type for in-game player groups: tracks the leader and an aggregate
 * level, and defines the membership/broadcast contract implementations fill in.
 */
public abstract class AbstractGroup
{
	// Current leader; replaced via setLeader.
	private Player _leader;
	// Aggregate group level; set via setLevel / recalculateLevel.
	private int _level;
	
	public AbstractGroup(Player leader)
	{
		_leader = leader;
	}
	
	/**
	 * @return a list of all members of this group.
	 */
	public abstract List<Player> getMembers();
	
	/**
	 * @return the count of all players in this group.
	 */
	public abstract int getMembersCount();
	
	/**
	 * Check if this group contains a given player.
	 * @param player : the player to check.
	 * @return {@code true} if this group contains the specified player, {@code false} otherwise.
	 */
	public abstract boolean containsPlayer(final WorldObject player);
	
	/**
	 * Broadcast a packet to every member of this group.
	 * @param packet : the packet to broadcast.
	 */
	public abstract void broadcastPacket(final L2GameServerPacket packet);
	
	/**
	 * Broadcast a CreatureSay packet to every member of this group. Similar to broadcastPacket, but with an embedded BlockList check.
	 * @param msg : the msg to broadcast.
	 * @param broadcaster : the player who broadcasts the message.
	 */
	public abstract void broadcastCreatureSay(final CreatureSay msg, final Player broadcaster);
	
	/**
	 * Recalculate the group level.
	 */
	public abstract void recalculateLevel();
	
	/**
	 * Destroy that group, resetting all possible values, leading to that group object destruction.
	 */
	public abstract void disband();
	
	/**
	 * @return the level of this group.
	 */
	public int getLevel()
	{
		return _level;
	}
	
	/**
	 * Change the level of this group. <b>Used only when the group is created.</b>
	 * @param level : the level to set.
	 */
	public void setLevel(int level)
	{
		_level = level;
	}
	
	/**
	 * @return the leader of this group.
	 */
	public Player getLeader()
	{
		return _leader;
	}
	
	/**
	 * Change the leader of this group to the specified player.
	 * @param leader : the player to set as the new leader of this group.
	 */
	public void setLeader(Player leader)
	{
		_leader = leader;
	}
	
	/**
	 * @return the leader objectId.
	 */
	public int getLeaderObjectId()
	{
		return _leader.getObjectId();
	}
	
	/**
	 * Check if a given player is the leader of this group (compared by objectId).
	 * @param player : the player to check.
	 * @return {@code true} if the specified player is the leader of this group, {@code false} otherwise.
	 */
	public boolean isLeader(Player player)
	{
		return _leader.getObjectId() == player.getObjectId();
	}
	
	/**
	 * Broadcast a system message to this group.
	 * @param message : the system message to broadcast.
	 */
	public void broadcastMessage(SystemMessageId message)
	{
		broadcastPacket(SystemMessage.getSystemMessage(message));
	}
	
	/**
	 * Broadcast a custom text message to this group.
	 * @param text : the custom string to broadcast.
	 */
	public void broadcastString(String text)
	{
		broadcastPacket(SystemMessage.sendString(text));
	}
	
	/**
	 * @return a random member of this group.
	 */
	public Player getRandomPlayer()
	{
		return Rnd.get(getMembers());
	}
}
|
/**
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech) (4.2.0-SNAPSHOT).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package org.openapitools.api;
import org.openapitools.model.ModelApiResponse;
import org.openapitools.model.Pet;
import org.springframework.core.io.Resource;
import io.swagger.annotations.*;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RequestPart;
import org.springframework.web.context.request.NativeWebRequest;
import org.springframework.web.multipart.MultipartFile;
import javax.validation.Valid;
import javax.validation.constraints.*;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@Validated
@Api(value = "Pet", description = "the Pet API")
public interface PetApi {

    // POST /pet — add a new pet to the store.
    @ApiOperation(value = "Add a new pet to the store", nickname = "addPet", notes = "", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 405, message = "Invalid input") })
    @RequestMapping(value = "/pet",
        consumes = "application/json",
        method = RequestMethod.POST)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<Void>> addPet(@ApiParam(value = "Pet object that needs to be added to the store" ,required=true ) @Valid @RequestBody Pet body);

    // DELETE /pet/{petId} — delete a pet; optional api_key header.
    @ApiOperation(value = "Deletes a pet", nickname = "deletePet", notes = "", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 400, message = "Invalid pet value") })
    @RequestMapping(value = "/pet/{petId}",
        method = RequestMethod.DELETE)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<Void>> deletePet(@ApiParam(value = "Pet id to delete",required=true) @PathVariable("petId") Long petId,@ApiParam(value = "" ) @RequestHeader(value="api_key", required=false) String apiKey);

    // GET /pet/findByStatus — list pets filtered by status values.
    @ApiOperation(value = "Finds Pets by status", nickname = "findPetsByStatus", notes = "Multiple status values can be provided with comma separated strings", response = Pet.class, responseContainer = "List", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "successful operation", response = Pet.class, responseContainer = "List"),
        @ApiResponse(code = 400, message = "Invalid status value") })
    @RequestMapping(value = "/pet/findByStatus",
        produces = "application/json",
        method = RequestMethod.GET)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<List<Pet>>> findPetsByStatus(@NotNull @ApiParam(value = "Status values that need to be considered for filter", required = true, allowableValues = "available, pending, sold") @Valid @RequestParam(value = "status", required = true) List<String> status);

    // GET /pet/findByTags — list pets filtered by tags.
    @ApiOperation(value = "Finds Pets by tags", nickname = "findPetsByTags", notes = "Multiple tags can be provided with comma separated strings. Use tag1, tag2, tag3 for testing.", response = Pet.class, responseContainer = "List", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "successful operation", response = Pet.class, responseContainer = "List"),
        @ApiResponse(code = 400, message = "Invalid tag value") })
    @RequestMapping(value = "/pet/findByTags",
        produces = "application/json",
        method = RequestMethod.GET)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<List<Pet>>> findPetsByTags(@NotNull @ApiParam(value = "Tags to filter by", required = true) @Valid @RequestParam(value = "tags", required = true) List<String> tags);

    // GET /pet/{petId} — fetch a single pet; authenticated via api_key.
    @ApiOperation(value = "Find pet by ID", nickname = "getPetById", notes = "Returns a single pet", response = Pet.class, authorizations = {
        @Authorization(value = "api_key")
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "successful operation", response = Pet.class),
        @ApiResponse(code = 400, message = "Invalid ID supplied"),
        @ApiResponse(code = 404, message = "Pet not found") })
    @RequestMapping(value = "/pet/{petId}",
        produces = "application/json",
        method = RequestMethod.GET)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<Pet>> getPetById(@ApiParam(value = "ID of pet to return",required=true) @PathVariable("petId") Long petId);

    // PUT /pet — replace an existing pet.
    @ApiOperation(value = "Update an existing pet", nickname = "updatePet", notes = "", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 400, message = "Invalid ID supplied"),
        @ApiResponse(code = 404, message = "Pet not found"),
        @ApiResponse(code = 405, message = "Validation exception") })
    @RequestMapping(value = "/pet",
        consumes = "application/json",
        method = RequestMethod.PUT)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<Void>> updatePet(@ApiParam(value = "Pet object that needs to be added to the store" ,required=true ) @Valid @RequestBody Pet body);

    // POST /pet/{petId} — partial update via form fields.
    @ApiOperation(value = "Updates a pet in the store with form data", nickname = "updatePetWithForm", notes = "", authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 405, message = "Invalid input") })
    @RequestMapping(value = "/pet/{petId}",
        consumes = "application/x-www-form-urlencoded",
        method = RequestMethod.POST)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<Void>> updatePetWithForm(@ApiParam(value = "ID of pet that needs to be updated",required=true) @PathVariable("petId") Long petId,@ApiParam(value = "Updated name of the pet" ) @RequestParam(value="name", required=false) String name,@ApiParam(value = "Updated status of the pet" ) @RequestParam(value="status", required=false) String status);

    // POST /pet/{petId}/uploadImage — multipart image upload for a pet.
    @ApiOperation(value = "uploads an image", nickname = "uploadFile", notes = "", response = ModelApiResponse.class, authorizations = {
        @Authorization(value = "petstore_auth", scopes = {
            @AuthorizationScope(scope = "write:pets", description = "modify pets in your account"),
            @AuthorizationScope(scope = "read:pets", description = "read your pets")
            })
    }, tags={ "pet", })
    @ApiResponses(value = {
        @ApiResponse(code = 200, message = "successful operation", response = ModelApiResponse.class) })
    @RequestMapping(value = "/pet/{petId}/uploadImage",
        produces = "application/json",
        consumes = "multipart/form-data",
        method = RequestMethod.POST)
    com.netflix.hystrix.HystrixCommand<ResponseEntity<ModelApiResponse>> uploadFile(@ApiParam(value = "ID of pet to update",required=true) @PathVariable("petId") Long petId,@ApiParam(value = "Additional data to pass to server" ) @RequestParam(value="additionalMetadata", required=false) String additionalMetadata,@ApiParam(value = "file detail") @RequestParam("file") MultipartFile file);

}
|
package org.yuno.apps.dragdropmanager.app;
import org.yuno.apps.dragdropmanager.DragDropManager;
import android.os.Bundle;
/**
 * Base activity that ties the DragDropManager singleton's lifetime to the
 * activity's lifecycle: created in onCreate(), destroyed in onDestroy().
 */
public class Activity extends android.app.Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // NOTE(review): createInstance() is invoked BEFORE super.onCreate() —
        // presumably so the manager exists before any view initialization;
        // confirm the library requires this ordering.
        DragDropManager.createInstance(this);
        super.onCreate(savedInstanceState);
    }

    @Override
    protected void onDestroy() {
        // Tear the singleton down before the framework's own destruction logic runs.
        DragDropManager.destroyInstance();
        super.onDestroy();
    }
}
|
package org.isisaddons.module.publishmq.dom.contracttests.with;
import com.google.common.collect.ImmutableMap;
import org.incode.module.base.dom.with.ComparableByNameContractTestAbstract_compareTo;
import org.incode.module.base.dom.with.WithNameComparable;
/**
 * Automatically tests all domain objects implementing
 * {@link WithNameComparable}.
 *
 * <p>The superclass scans the given package and applies the
 * {@code compareTo} contract checks to every matching type found.
 */
public class WithNameComparableContractForIncodeModuleTest_compareTo extends
        ComparableByNameContractTestAbstract_compareTo {

    public WithNameComparableContractForIncodeModuleTest_compareTo() {
        // Scan the whole publishmq module; the empty map supplies no per-type
        // overrides (see the superclass for the map's exact meaning).
        super("org.isisaddons.module.publishmq", ImmutableMap.<Class<?>, Class<?>>of());
    }
}
|
package com.ctrip.framework.cs;
import com.ctrip.framework.cs.util.DesUtil;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
* Created by jiang.j on 2016/5/3.
*/
/**
 * Round-trip test for {@code DesUtil}: data encrypted with a key must decrypt
 * back to the original plaintext using the same key.
 */
public class DesUtilTest {

    @Test
    public void testEncrypt() throws Exception {
        // Declaring "throws Exception" lets any crypto failure fail the test.
        // The original caught the exception and only printed it, which made the
        // test pass vacuously whenever encrypt/decrypt threw.
        String key = "somekey09!@%%$";
        String data = "hello world";
        assertEquals(data, DesUtil.decrypt(DesUtil.encrypt(data, key), key));
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.10.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.user;
import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;
/**
 * User status values as defined in the Thrift IDL. Each constant carries the
 * integer wire value assigned by the IDL (not necessarily the Java ordinal).
 */
public enum Status implements org.apache.thrift.TEnum {
  ACTIVE(0),
  CONFIRMED(1),
  APPROVED(2),
  DELETED(3),
  DUPLICATE(4),
  GRACE_PERIOD(5),
  INVITED(6),
  DENIED(7),
  PENDING(8),
  PENDING_APPROVAL(9),
  PENDING_CONFIRMATION(10),
  SUSPENDED(11),
  DECLINED(12),
  EXPIRED(13);

  // Wire value assigned in the Thrift IDL.
  private final int value;

  private Status(int value) {
    this.value = value;
  }

  /**
   * Get the integer value of this enum value, as defined in the Thrift IDL.
   */
  public int getValue() {
    return value;
  }

  /**
   * Find the enum type by its integer value, as defined in the Thrift IDL.
   * @return null if the value is not found.
   */
  public static Status findByValue(int value) {
    // Linear scan over the constants; returns null for unknown values,
    // matching the behavior of the generated switch it replaces.
    for (Status status : values()) {
      if (status.value == value) {
        return status;
      }
    }
    return null;
  }
}
|
/*******************************************************************************
* Copyright (c) 2000, 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Matt Chapman, mpchapman@gmail.com - 89977 Make JDT .java agnostic
*******************************************************************************/
package org.eclipse.jdt.internal.ui.wizards;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.ibm.icu.text.Collator;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.OperationCanceledException;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Status;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.resources.IResourceProxy;
import org.eclipse.core.resources.IResourceProxyVisitor;
import org.eclipse.jdt.core.IClasspathEntry;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.JavaConventions;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.core.ToolFactory;
import org.eclipse.jdt.core.compiler.CharOperation;
import org.eclipse.jdt.core.dom.AST;
import org.eclipse.jdt.core.dom.ASTParser;
import org.eclipse.jdt.core.dom.CompilationUnit;
import org.eclipse.jdt.core.dom.PackageDeclaration;
import org.eclipse.jdt.core.util.IClassFileReader;
import org.eclipse.jdt.core.util.ISourceAttribute;
import org.eclipse.jdt.internal.corext.util.JavaConventionsUtil;
import org.eclipse.jdt.internal.corext.util.Messages;
import org.eclipse.jdt.ui.PreferenceConstants;
import org.eclipse.jdt.internal.ui.JavaPlugin;
import org.eclipse.jdt.internal.ui.viewsupport.BasicElementLabels;
/**
 * Detects a plausible Java build path for an existing project by scanning its
 * resources: package declarations of .java files yield source folders, the
 * source-file attributes of .class files suggest an output folder, and .jar
 * files become library entries. Detection runs in the constructor; results are
 * available afterwards via getClasspath() and getOutputLocation().
 */
public class ClassPathDetector implements IResourceProxyVisitor {

    // Maps a source-folder path (IPath) to the List of compilation-unit
    // paths (IPath, package-relative) detected under it.
    private HashMap fSourceFolders;
    // All .class files (IFile) found while visiting the project.
    private List fClassFiles;
    // Full paths (IPath) of all .jar files found while visiting the project.
    private HashSet fJARFiles;

    private IProject fProject;

    private IPath fResultOutputFolder;
    private IClasspathEntry[] fResultClasspath;

    private IProgressMonitor fMonitor;

    /** Orders classpath entries alphabetically by their path string. */
    private static class CPSorter implements Comparator {
        private Collator fCollator= Collator.getInstance();
        public int compare(Object o1, Object o2) {
            IClasspathEntry e1= (IClasspathEntry) o1;
            IClasspathEntry e2= (IClasspathEntry) o2;
            return fCollator.compare(e1.getPath().toString(), e2.getPath().toString());
        }
    }

    /**
     * Creates the detector and immediately runs detection on the given project.
     *
     * @param project the project to scan
     * @param monitor the progress monitor, may be null
     * @throws CoreException if visiting the project's resources fails
     */
    public ClassPathDetector(IProject project, IProgressMonitor monitor) throws CoreException {
        fSourceFolders= new HashMap();
        fJARFiles= new HashSet(10);
        fClassFiles= new ArrayList(100);
        fProject= project;

        fResultClasspath= null;
        fResultOutputFolder= null;

        if (monitor == null) {
            monitor = new NullProgressMonitor();
        }
        detectClasspath(monitor);
    }

    /** Returns true if any path produced by the iterator is a prefix of the given path. */
    private boolean isNested(IPath path, Iterator iter) {
        while (iter.hasNext()) {
            IPath other= (IPath) iter.next();
            if (other.isPrefixOf(path)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Visits the project, then derives source folders, the output folder and
     * library entries; the result fields are set only if the combination
     * validates as a classpath.
     *
     * @param monitor The progress monitor (not null)
     * @throws CoreException in case of any failure
     */
    private void detectClasspath(IProgressMonitor monitor) throws CoreException {
        try {
            monitor.beginTask(NewWizardMessages.ClassPathDetector_operation_description, 4);
            fMonitor= monitor;
            fProject.accept(this, IResource.NONE);
            monitor.worked(1);

            ArrayList cpEntries= new ArrayList();

            detectSourceFolders(cpEntries);
            if (monitor.isCanceled()) {
                throw new OperationCanceledException();
            }
            monitor.worked(1);

            IPath outputLocation= detectOutputFolder();
            if (monitor.isCanceled()) {
                throw new OperationCanceledException();
            }
            monitor.worked(1);

            detectLibraries(cpEntries, outputLocation);
            if (monitor.isCanceled()) {
                throw new OperationCanceledException();
            }
            monitor.worked(1);

            // Nothing Java-related found: leave the result fields null.
            if (cpEntries.isEmpty() && fClassFiles.isEmpty()) {
                return;
            }
            IClasspathEntry[] jreEntries= PreferenceConstants.getDefaultJRELibrary();
            for (int i= 0; i < jreEntries.length; i++) {
                cpEntries.add(jreEntries[i]);
            }

            IClasspathEntry[] entries= (IClasspathEntry[]) cpEntries.toArray(new IClasspathEntry[cpEntries.size()]);
            // Only publish a classpath that the Java model considers valid.
            if (!JavaConventions.validateClasspath(JavaCore.create(fProject), entries, outputLocation).isOK()) {
                return;
            }

            fResultClasspath= entries;
            fResultOutputFolder= outputLocation;
        } finally {
            monitor.done();
        }
    }

    /** Returns the source folder whose CU list contains the given path, or null. */
    private IPath findInSourceFolders(IPath path) {
        Iterator iter= fSourceFolders.keySet().iterator();
        while (iter.hasNext()) {
            Object key= iter.next();
            List cus= (List) fSourceFolders.get(key);
            if (cus.contains(path)) {
                return (IPath) key;
            }
        }
        return null;
    }

    /**
     * Guesses the output folder: if a .class file's source attribute matches a
     * detected compilation unit, the class file's folder is taken as the output
     * location; otherwise a "bin"-style name is chosen (from preferences) that
     * does not collide with any detected class folder.
     */
    private IPath detectOutputFolder() throws CoreException {
        HashSet classFolders= new HashSet();

        for (Iterator iter= fClassFiles.iterator(); iter.hasNext();) {
            IFile file= (IFile) iter.next();
            IClassFileReader reader= null;
            InputStream content= null;
            try {
                content= file.getContents();
                reader= ToolFactory.createDefaultClassFileReader(content, IClassFileReader.CLASSFILE_ATTRIBUTES);
            } finally {
                try {
                    if (content != null)
                        content.close();
                } catch (IOException e) {
                    throw new CoreException(new Status(IStatus.ERROR, JavaPlugin.getPluginId(), IStatus.ERROR,
                            Messages.format(NewWizardMessages.ClassPathDetector_error_closing_file, BasicElementLabels.getPathLabel(file.getFullPath(), false)),
                            e));
                }
            }
            if (reader == null) {
                continue; // problematic class file
            }
            char[] className= reader.getClassName();
            ISourceAttribute sourceAttribute= reader.getSourceFileAttribute();
            if (className != null && sourceAttribute != null && sourceAttribute.getSourceFileName() != null) {
                IPath packPath= file.getParent().getFullPath();
                // idx is the length of the package prefix inside the binary name.
                int idx= CharOperation.lastIndexOf('/', className) + 1;
                IPath relPath= new Path(new String(className, 0, idx));
                IPath cuPath= relPath.append(new String(sourceAttribute.getSourceFileName()));

                IPath resPath= null;
                if (idx == 0) {
                    // Default package: the class file's folder is the candidate root.
                    resPath= packPath;
                } else {
                    IPath folderPath= getFolderPath(packPath, relPath);
                    if (folderPath != null) {
                        resPath= folderPath;
                    }
                }
                if (resPath != null) {
                    IPath path= findInSourceFolders(cuPath);
                    if (path != null) {
                        return resPath;
                    } else {
                        classFolders.add(resPath);
                    }
                }
            }
        }
        IPath projPath= fProject.getFullPath();
        // A single source folder at the project root and no class folders:
        // use the project root itself as the output location.
        if (fSourceFolders.size() == 1 && classFolders.isEmpty() && fSourceFolders.get(projPath) != null) {
            return projPath;
        } else {
            // Take the preference-configured binary folder name; append '1's
            // until it does not clash with a detected class folder.
            IPath path= projPath.append(PreferenceConstants.getPreferenceStore().getString(PreferenceConstants.SRCBIN_BINNAME));
            while (classFolders.contains(path)) {
                path= new Path(path.toString() + '1');
            }
            return path;
        }
    }

    /**
     * Adds a library entry for every detected .jar that lies neither inside a
     * source folder nor inside the output location.
     */
    private void detectLibraries(ArrayList cpEntries, IPath outputLocation) {
        ArrayList res= new ArrayList();
        Set sourceFolderSet= fSourceFolders.keySet();
        for (Iterator iter= fJARFiles.iterator(); iter.hasNext();) {
            IPath path= (IPath) iter.next();
            if (isNested(path, sourceFolderSet.iterator())) {
                continue;
            }
            if (outputLocation != null && outputLocation.isPrefixOf(path)) {
                continue;
            }
            IClasspathEntry entry= JavaCore.newLibraryEntry(path, null, null);
            res.add(entry);
        }
        Collections.sort(res, new CPSorter());
        cpEntries.addAll(res);
    }

    /**
     * Adds a source entry for every detected source folder; a source folder
     * nested inside another one becomes an exclusion pattern of the outer one.
     */
    private void detectSourceFolders(ArrayList resEntries) {
        ArrayList res= new ArrayList();
        Set sourceFolderSet= fSourceFolders.keySet();
        for (Iterator iter= sourceFolderSet.iterator(); iter.hasNext();) {
            IPath path= (IPath) iter.next();
            ArrayList excluded= new ArrayList();
            for (Iterator inner= sourceFolderSet.iterator(); inner.hasNext();) {
                IPath other= (IPath) inner.next();
                if (!path.equals(other) && path.isPrefixOf(other)) {
                    IPath pathToExclude= other.removeFirstSegments(path.segmentCount()).addTrailingSeparator();
                    excluded.add(pathToExclude);
                }
            }
            IPath[] excludedPaths= (IPath[]) excluded.toArray(new IPath[excluded.size()]);
            IClasspathEntry entry= JavaCore.newSourceEntry(path, excludedPaths);
            res.add(entry);
        }
        Collections.sort(res, new CPSorter());
        resEntries.addAll(res);
    }

    /**
     * Parses a .java file's package declaration and records the folder that
     * corresponds to the package root as a source folder.
     */
    private void visitCompilationUnit(IFile file) {
        ICompilationUnit cu= JavaCore.createCompilationUnitFrom(file);
        if (cu != null) {
            ASTParser parser= ASTParser.newParser(AST.JLS3);
            parser.setSource(cu);
            parser.setFocalPosition(0); // only the package declaration is needed
            CompilationUnit root= (CompilationUnit)parser.createAST(null);
            PackageDeclaration packDecl= root.getPackage();

            IPath packPath= file.getParent().getFullPath();
            String cuName= file.getName();
            if (packDecl == null) {
                // Default package: the containing folder is the source folder.
                addToMap(fSourceFolders, packPath, new Path(cuName));
            } else {
                IPath relPath= new Path(packDecl.getName().getFullyQualifiedName().replace('.', '/'));
                IPath folderPath= getFolderPath(packPath, relPath);
                if (folderPath != null) {
                    addToMap(fSourceFolders, folderPath, relPath.append(cuName));
                }
            }
        }
    }

    /** Appends relPath to the list stored under folderPath, creating the list on first use. */
    private void addToMap(HashMap map, IPath folderPath, IPath relPath) {
        List list= (List) map.get(folderPath);
        if (list == null) {
            list= new ArrayList(50);
            map.put(folderPath, list);
        }
        list.add(relPath);
    }

    /**
     * Returns the package-root folder obtained by stripping the package path
     * (relpath) from the end of packPath, or null if packPath does not end
     * with relpath.
     */
    private IPath getFolderPath(IPath packPath, IPath relpath) {
        int remainingSegments= packPath.segmentCount() - relpath.segmentCount();
        if (remainingSegments >= 0) {
            IPath common= packPath.removeFirstSegments(remainingSegments);
            if (common.equals(relpath)) {
                return packPath.uptoSegment(remainingSegments);
            }
        }
        return null;
    }

    /** True if name ends with ext and is strictly longer than ext itself. */
    private boolean hasExtension(String name, String ext) {
        return name.endsWith(ext) && (ext.length() != name.length());
    }

    /** True if name is a valid compilation-unit name for this project's source level. */
    private boolean isValidCUName(String name) {
        return !JavaConventionsUtil.validateCompilationUnitName(name, JavaCore.create(fProject)).matches(IStatus.ERROR);
    }

    /* (non-Javadoc)
     * @see org.eclipse.core.resources.IResourceProxyVisitor#visit(org.eclipse.core.resources.IResourceProxy)
     */
    public boolean visit(IResourceProxy proxy) {
        if (fMonitor.isCanceled()) {
            throw new OperationCanceledException();
        }
        // Classify files by name/extension; keep descending into folders.
        if (proxy.getType() == IResource.FILE) {
            String name= proxy.getName();
            if (isValidCUName(name)) {
                visitCompilationUnit((IFile) proxy.requestResource());
            } else if (hasExtension(name, ".class")) { //$NON-NLS-1$
                fClassFiles.add(proxy.requestResource());
            } else if (hasExtension(name, ".jar")) { //$NON-NLS-1$
                fJARFiles.add(proxy.requestFullPath());
            }
            return false;
        }
        return true;
    }

    /** Returns the detected output location, or null if detection produced no result. */
    public IPath getOutputLocation() {
        return fResultOutputFolder;
    }

    /** Returns the detected classpath, or an empty array if detection produced no result. */
    public IClasspathEntry[] getClasspath() {
        if (fResultClasspath == null)
            return new IClasspathEntry[0];
        return fResultClasspath;
    }
}
|
package com.zou.gulimall.coupon.dao;
import com.zou.gulimall.coupon.entity.CouponHistoryEntity;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
* 优惠券领取历史记录
*
* @author zou
* @email zouhuan@qq.com
* @date 2021-01-04 20:53:47
*/
@Mapper
public interface CouponHistoryDao extends BaseMapper<CouponHistoryEntity> {
    // Intentionally empty: all CRUD operations are inherited from the
    // MyBatis-Plus BaseMapper; add custom queries here if needed.
}
|
package org.vitrivr.cineast.core.db.dao;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.vitrivr.cineast.core.data.entities.MediaSegmentMetadataDescriptor;
import org.vitrivr.cineast.core.data.providers.primitive.PrimitiveTypeProvider;
import org.vitrivr.cineast.core.data.providers.primitive.StringTypeProvider;
import org.vitrivr.cineast.core.db.DBSelector;
import org.vitrivr.cineast.core.db.PersistencyWriter;
import org.vitrivr.cineast.core.db.PersistentTuple;
import org.vitrivr.cineast.core.db.dao.reader.DatabaseLookupException;
import org.vitrivr.cineast.core.util.LogHelper;
import java.io.Closeable;
import java.util.*;
import static org.vitrivr.cineast.core.data.entities.MediaSegmentMetadataDescriptor.ENTITY;
import static org.vitrivr.cineast.core.data.entities.MediaSegmentMetadataDescriptor.FIELDNAMES;
/**
 * Data-access handler for per-segment metadata: persists
 * {@link MediaSegmentMetadataDescriptor}s through a {@link PersistencyWriter}
 * and reads them back through a {@link DBSelector}. Both channels are opened
 * on construction and released via {@link #close()}.
 */
public class MediaSegmentMetadataHandler implements Closeable {

    private static final Logger LOGGER = LogManager.getLogger();

    /** Write channel used to persist metadata tuples. */
    private final PersistencyWriter<?> writer;

    /** Read channel used to look up metadata rows. */
    private final DBSelector selector;

    /**
     * Opens both the selector and the writer on the segment-metadata entity.
     *
     * @param selector lookup facility; must not be null
     * @param writer persistence facility; must not be null
     * @throws NullPointerException if either argument is null
     */
    public MediaSegmentMetadataHandler(DBSelector selector, PersistencyWriter<?> writer) {
        // requireNonNull keeps the original contract (NPE with the same message)
        // but rejects null before any field assignment or open() call.
        this.selector = Objects.requireNonNull(selector, "selector cannot be null");
        this.writer = Objects.requireNonNull(writer, "writer cannot be null");
        this.selector.open(ENTITY);
        this.writer.open(ENTITY);
        this.writer.setFieldNames(FIELDNAMES);
    }

    /**
     * Persists a single metadata descriptor.
     *
     * @param descriptor descriptor to persist; null is tolerated
     * @return true if the descriptor was persisted; false if it was null or persisting failed
     */
    public boolean addDescriptor(MediaSegmentMetadataDescriptor descriptor) {
        if (descriptor == null) {
            return false;
        }
        return this.writer.persist(this.toPersistentTuple(descriptor));
    }

    /**
     * Persists a batch of metadata descriptors.
     *
     * @param descriptors descriptors to persist; null yields false, an empty collection yields true
     * @return true if the batch was persisted
     */
    public boolean addDescriptors(Collection<MediaSegmentMetadataDescriptor> descriptors) {
        if (descriptors == null) {
            return false;
        }
        if (descriptors.isEmpty()) {
            return true;
        }
        ArrayList<PersistentTuple> tuples = new ArrayList<>(descriptors.size());
        for (MediaSegmentMetadataDescriptor descriptor : descriptors) {
            tuples.add(this.toPersistentTuple(descriptor));
        }
        return this.writer.persist(tuples);
    }

    /**
     * Fetches all metadata descriptors stored for the given segment id.
     * Rows that cannot be converted are logged and skipped rather than
     * aborting the whole lookup.
     *
     * @param segmentId id of the segment; null or empty yields an empty list
     * @return list of descriptors, never null
     */
    public List<MediaSegmentMetadataDescriptor> getDescriptors(String segmentId) {
        if (segmentId == null || segmentId.isEmpty()) {
            return Collections.emptyList();
        }
        List<Map<String, PrimitiveTypeProvider>> rows = this.selector.getRows(FIELDNAMES[0], new StringTypeProvider(segmentId));
        ArrayList<MediaSegmentMetadataDescriptor> results = new ArrayList<>(rows.size());
        for (Map<String, PrimitiveTypeProvider> row : rows) {
            try {
                results.add(new MediaSegmentMetadataDescriptor(row));
            } catch (DatabaseLookupException e) {
                // Best-effort: skip the malformed row but keep the rest of the result set.
                LOGGER.error(LogHelper.getStackTrace(e));
            }
        }
        return results;
    }

    /** Closes the underlying selector and writer. */
    @Override
    public void close() {
        this.selector.close();
        this.writer.close();
    }

    /** Converts a descriptor into the persistent tuple layout defined by FIELDNAMES. */
    private PersistentTuple toPersistentTuple(MediaSegmentMetadataDescriptor descriptor) {
        return this.writer.generateTuple(descriptor.getSegmentId(), descriptor.getDomain(), descriptor.getKey(), descriptor.getValue());
    }
}
|
/* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/**
*******************************************************************************
* Copyright (C) 1996-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package ohos.global.icu.text;
import java.text.CharacterIterator;
import java.util.HashMap;
import java.util.Map;
import ohos.global.icu.impl.CharacterIteratorWrapper;
import ohos.global.icu.impl.coll.Collation;
import ohos.global.icu.impl.coll.CollationData;
import ohos.global.icu.impl.coll.CollationIterator;
import ohos.global.icu.impl.coll.ContractionsAndExpansions;
import ohos.global.icu.impl.coll.FCDIterCollationIterator;
import ohos.global.icu.impl.coll.FCDUTF16CollationIterator;
import ohos.global.icu.impl.coll.IterCollationIterator;
import ohos.global.icu.impl.coll.UTF16CollationIterator;
import ohos.global.icu.impl.coll.UVector32;
/**
* <code>CollationElementIterator</code> is an iterator created by
* a RuleBasedCollator to walk through a string. The return result of
* each iteration is a 32-bit collation element (CE) that defines the
* ordering priority of the next character or sequence of characters
* in the source string.
*
* <p>For illustration, consider the following in Slovak and in traditional Spanish collation:
* <blockquote>
* <pre>
* "ca" -> the first collation element is CE('c') and the second
* collation element is CE('a').
* "cha" -> the first collation element is CE('ch') and the second
* collation element is CE('a').
* </pre>
* </blockquote>
* And in German phonebook collation,
* <blockquote>
* <pre>
* Since the character 'æ' is a composed character of 'a' and 'e', the
* iterator returns two collation elements for the single character 'æ'
*
* "æb" -> the first collation element is collation_element('a'), the
* second collation element is collation_element('e'), and the
* third collation element is collation_element('b').
* </pre>
* </blockquote>
*
* <p>For collation ordering comparison, the collation element results
* can not be compared simply by using basic arithmetic operators,
* e.g. <, == or >, further processing has to be done. Details
* can be found in the ICU
* <a href="http://userguide.icu-project.org/collation/architecture">
* User Guide</a>. An example of using the CollationElementIterator
* for collation ordering comparison is the class
* {@link ohos.global.icu.text.StringSearch}.
*
* <p>To construct a CollationElementIterator object, users
* call the method getCollationElementIterator() on a
* RuleBasedCollator that defines the desired sorting order.
*
* <p> Example:
* <blockquote>
* <pre>
* String testString = "This is a test";
* RuleBasedCollator rbc = new RuleBasedCollator("&a<b");
* CollationElementIterator iterator = rbc.getCollationElementIterator(testString);
* int primaryOrder = iterator.IGNORABLE;
* while (primaryOrder != iterator.NULLORDER) {
* int order = iterator.next();
* if (order != iterator.IGNORABLE &&
* order != iterator.NULLORDER) {
* // order is valid, not ignorable and we have not passed the end
* // of the iteration, we do something
* primaryOrder = CollationElementIterator.primaryOrder(order);
* System.out.println("Next primary order 0x" +
* Integer.toHexString(primaryOrder));
* }
* }
* </pre>
* </blockquote>
* <p>
* The method next() returns the collation order of the next character based on
* the comparison level of the collator. The method previous() returns the
* collation order of the previous character based on the comparison level of
* the collator. The Collation Element Iterator moves only in one direction
* between calls to reset(), setOffset(), or setText(). That is, next() and
* previous() can not be inter-used. Whenever previous() is to be called after
* next() or vice versa, reset(), setOffset() or setText() has to be called first
* to reset the status, shifting current position to either the end or the start of
* the string (reset() or setText()), or the specified position (setOffset()).
* Hence at the next call of next() or previous(), the first or last collation order,
* or collation order at the specified position will be returned. If a change of
* direction is done without one of these calls, the result is undefined.
* <p>
* This class is not subclassable.
* @see Collator
* @see RuleBasedCollator
* @see StringSearch
* @author Syn Wee Quek
*/
public final class CollationElementIterator
{
private CollationIterator iter_;  // owned
private RuleBasedCollator rbc_;  // aliased
// Pending second 32-bit half of a split 64-bit CE; 0 when nothing is pending.
// When nonzero, the next call to next()/previous() returns it before advancing.
private int otherHalf_;
/**
 * <0: backwards; 0: just after reset() (previous() begins from end);
 * 1: just after setOffset(); >1: forward
 */
private byte dir_;
/**
 * Stores offsets from expansions and from unsafe-backwards iteration,
 * so that getOffset() returns intermediate offsets for the CEs
 * that are consistent with forward iteration.
 */
private UVector32 offsets_;

private String string_; // TODO: needed in Java? if so, then add a UCharacterIterator field too?

/**
 * This constant is returned by the iterator in the methods
 * next() and previous() when the end or the beginning of the
 * source string has been reached, and there are no more valid
 * collation elements to return.
 *
 * <p>See class documentation for an example of use.
 * @see #next
 * @see #previous */
public final static int NULLORDER = 0xffffffff;

/**
 * This constant is returned by the iterator in the methods
 * next() and previous() when a collation element result is to be
 * ignored.
 *
 * <p>See class documentation for an example of use.
 * @see #next
 * @see #previous */
public static final int IGNORABLE = 0;
/**
 * Returns the primary weight of the given 32-bit collation element:
 * its top 16 bits, as an unsigned value.
 *
 * @param ce the collation element
 * @return the element's 16 bits primary order.
 */
public final static int primaryOrder(int ce) {
    // An unsigned right shift of an int by 16 leaves exactly the top 16 bits
    // (zero-filled), so no additional mask is required.
    return ce >>> 16;
}
/**
 * Returns the secondary weight of the given 32-bit collation element:
 * the unsigned byte directly below the primary weight.
 *
 * @param ce the collation element
 * @return the element's 8 bits secondary order
 */
public final static int secondaryOrder(int ce) {
    int shifted = ce >>> 8;
    return shifted & 0xff;
}
/**
 * Returns the tertiary weight of the given 32-bit collation element:
 * its low byte, as an unsigned value.
 *
 * @param ce the collation element
 * @return the element's 8 bits tertiary order
 */
public final static int tertiaryOrder(int ce) {
    // The tertiary weight occupies the least-significant byte.
    return ce & 0x000000ff;
}
/**
 * Builds the first old-style 32-bit CE half from a 64-bit CE's primary
 * weight {@code p} and its lower 32 weight bits.
 */
private static final int getFirstHalf(long p, int lower32) {
    int primaryPart = (int) p & 0xffff0000;
    int secondaryPart = (lower32 >> 16) & 0xff00;
    int tertiaryPart = (lower32 >> 8) & 0xff;
    return primaryPart | secondaryPart | tertiaryPart;
}
/**
 * Builds the second old-style 32-bit CE half (the continuation part) from a
 * 64-bit CE's primary weight {@code p} and its lower 32 weight bits.
 */
private static final int getSecondHalf(long p, int lower32) {
    int primaryPart = (int) p << 16;
    int secondaryPart = (lower32 >> 8) & 0xff00;
    int tertiaryPart = lower32 & 0x3f;
    return primaryPart | secondaryPart | tertiaryPart;
}
/**
 * True if the 64-bit CE carries bits that only fit into a second
 * 32-bit half (see {@code getSecondHalf}).
 */
private static final boolean ceNeedsTwoParts(long ce) {
    final long SECOND_HALF_BITS = 0xffff00ff003fL;
    return (ce & SECOND_HALF_BITS) != 0;
}
/** Shared initialization: aliases the collator and clears all iteration state. */
private CollationElementIterator(RuleBasedCollator collator) {
    rbc_ = collator;
    iter_ = null;
    otherHalf_ = 0;
    dir_ = 0;
    offsets_ = null;
}
/**
 * CollationElementIterator constructor. This takes a source
 * string and a RuleBasedCollator. The iterator will walk through
 * the source string based on the rules defined by the
 * collator. If the source string is empty, NULLORDER will be
 * returned on the first call to next().
 *
 * @param source the source string.
 * @param collator the RuleBasedCollator
 */
CollationElementIterator(String source, RuleBasedCollator collator) {
    this(collator);  // initialize shared iteration state
    setText(source);
}
// Note: The constructors should take settings & tailoring, not a collator,
// to avoid circular dependencies.
// However, for equals() we would need to be able to compare tailoring data for equality
// without making CollationData or CollationTailoring depend on TailoredSet.
// (See the implementation of RuleBasedCollator.equals().)
// That might require creating an intermediate class that would be used
// by both CollationElementIterator and RuleBasedCollator
// but only contain the part of RBC.equals() related to data and rules.
/**
 * CollationElementIterator constructor. This takes a source
 * character iterator and a RuleBasedCollator. The iterator will
 * walk through the source string based on the rules defined by
 * the collator. If the source string is empty, NULLORDER will be
 * returned on the first call to next().
 *
 * @param source the source string iterator.
 * @param collator the RuleBasedCollator
 */
CollationElementIterator(CharacterIterator source, RuleBasedCollator collator) {
    this(collator);  // initialize shared iteration state
    setText(source);
}
/**
 * CollationElementIterator constructor. This takes a source
 * character iterator and a RuleBasedCollator. The iterator will
 * walk through the source string based on the rules defined by
 * the collator. If the source string is empty, NULLORDER will be
 * returned on the first call to next().
 *
 * @param source the source string iterator.
 * @param collator the RuleBasedCollator
 */
CollationElementIterator(UCharacterIterator source, RuleBasedCollator collator) {
    this(collator);  // initialize shared iteration state
    setText(source);
}
/**
 * Returns the character offset in the source string
 * corresponding to the next collation element. I.e., getOffset()
 * returns the position in the source string corresponding to the
 * collation element that will be returned by the next call to
 * next() or previous(). This value could be any of:
 * <ul>
 * <li> The index of the <b>first</b> character corresponding to
 * the next collation element. (This means that if
 * <code>setOffset(offset)</code> sets the index in the middle of
 * a contraction, <code>getOffset()</code> returns the index of
 * the first character in the contraction, which may not be equal
 * to the original offset that was set. Hence calling getOffset()
 * immediately after setOffset(offset) does not guarantee that the
 * original offset set will be returned.)
 * <li> If normalization is on, the index of the <b>immediate</b>
 * subsequent character, or composite character with the first
 * character, having a combining class of 0.
 * <li> The length of the source string, if iteration has reached
 * the end.
 *</ul>
 *
 * @return The character offset in the source string corresponding to the
 *         collation element that will be returned by the next call to
 *         next() or previous().
 */
public int getOffset() {
    // During backwards iteration with recorded expansion offsets, report the
    // offset that forward iteration would have produced for the same CE.
    if (dir_ < 0 && offsets_ != null && !offsets_.isEmpty()) {
        // CollationIterator.previousCE() decrements the CEs length
        // while it pops CEs from its internal buffer.
        int i = iter_.getCEsLength();
        if (otherHalf_ != 0) {
            // Return the trailing CE offset while we are in the middle of a 64-bit CE.
            ++i;
        }
        assert (i < offsets_.size());
        return offsets_.elementAti(i);
    }
    return iter_.getOffset();
}
/**
 * Get the next collation element in the source string.
 *
 * <p>This iterator iterates over a sequence of collation elements
 * that were built from the string. Because there isn't
 * necessarily a one-to-one mapping from characters to collation
 * elements, this doesn't mean the same thing as "return the
 * collation element [or ordering priority] of the next character
 * in the string".
 *
 * <p>This function returns the collation element that the
 * iterator is currently pointing to, and then updates the
 * internal pointer to point to the next element.
 *
 * @return the next collation element or NULLORDER if the end of the
 *         iteration has been reached.
 * @throws IllegalStateException on a change of direction without an
 *         intervening reset()/setOffset()/setText()
 */
public int next() {
    if (dir_ > 1) {
        // Continue forward iteration. Test this first.
        if (otherHalf_ != 0) {
            // A 64-bit CE was split in two; return the pending second half first.
            int oh = otherHalf_;
            otherHalf_ = 0;
            return oh;
        }
    } else if (dir_ == 1) {
        // next() after setOffset()
        dir_ = 2;
    } else if (dir_ == 0) {
        // The iter_ is already reset to the start of the text.
        dir_ = 2;
    } else /* dir_ < 0 */{
        // illegal change of direction
        throw new IllegalStateException("Illegal change of direction");
        // Java porting note: ICU4C sets U_INVALID_STATE_ERROR to the return status.
    }
    // No need to keep all CEs in the buffer when we iterate.
    iter_.clearCEsIfNoneRemaining();
    long ce = iter_.nextCE();
    if (ce == Collation.NO_CE) {
        return NULLORDER;
    }
    // Turn the 64-bit CE into two old-style 32-bit CEs, without quaternary bits.
    long p = ce >>> 32;
    int lower32 = (int) ce;
    int firstHalf = getFirstHalf(p, lower32);
    int secondHalf = getSecondHalf(p, lower32);
    if (secondHalf != 0) {
        otherHalf_ = secondHalf | 0xc0; // continuation CE
    }
    return firstHalf;
}
/**
 * Get the previous collation element in the source string.
 *
 * <p>This iterator iterates over a sequence of collation elements
 * that were built from the string. Because there isn't
 * necessarily a one-to-one mapping from characters to collation
 * elements, this doesn't mean the same thing as "return the
 * collation element [or ordering priority] of the previous
 * character in the string".
 *
 * <p>This function updates the iterator's internal pointer to
 * point to the collation element preceding the one it's currently
 * pointing to and then returns that element, while next() returns
 * the current element and then updates the pointer.
 *
 * @return the previous collation element, or NULLORDER when the start of
 *         the iteration has been reached.
 * @throws IllegalStateException on a change of direction without an
 *         intervening reset()/setOffset()/setText()
 */
public int previous() {
    if (dir_ < 0) {
        // Continue backwards iteration. Test this first.
        if (otherHalf_ != 0) {
            // A 64-bit CE was split in two; return the pending half first.
            int oh = otherHalf_;
            otherHalf_ = 0;
            return oh;
        }
    } else if (dir_ == 0) {
        // First call after reset(): start from the end of the text.
        iter_.resetToOffset(string_.length());
        dir_ = -1;
    } else if (dir_ == 1) {
        // previous() after setOffset()
        dir_ = -1;
    } else /* dir_ > 1 */{
        // illegal change of direction
        throw new IllegalStateException("Illegal change of direction");
        // Java porting note: ICU4C sets U_INVALID_STATE_ERROR to the return status.
    }
    if (offsets_ == null) {
        offsets_ = new UVector32();
    }
    // If we already have expansion CEs, then we also have offsets.
    // Otherwise remember the trailing offset in case we need to
    // write offsets for an artificial expansion.
    int limitOffset = iter_.getCEsLength() == 0 ? iter_.getOffset() : 0;
    long ce = iter_.previousCE(offsets_);
    if (ce == Collation.NO_CE) {
        return NULLORDER;
    }
    // Turn the 64-bit CE into two old-style 32-bit CEs, without quaternary bits.
    long p = ce >>> 32;
    int lower32 = (int) ce;
    int firstHalf = getFirstHalf(p, lower32);
    int secondHalf = getSecondHalf(p, lower32);
    if (secondHalf != 0) {
        if (offsets_.isEmpty()) {
            // When we convert a single 64-bit CE into two 32-bit CEs,
            // we need to make this artificial expansion behave like a normal expansion.
            // See CollationIterator.previousCE().
            offsets_.addElement(iter_.getOffset());
            offsets_.addElement(limitOffset);
        }
        otherHalf_ = firstHalf;
        return secondHalf | 0xc0; // continuation CE
    }
    return firstHalf;
}
/**
* Resets the cursor to the beginning of the string. The next
* call to next() or previous() will return the first and last
* collation element in the string, respectively.
*
* <p>If the RuleBasedCollator used by this iterator has had its
* attributes changed, calling reset() will reinitialize the
* iterator to use the new attributes.
*/
public void reset() {
iter_ .resetToOffset(0);
otherHalf_ = 0;
dir_ = 0;
}
/**
* Sets the iterator to point to the collation element
* corresponding to the character at the specified offset. The
* value returned by the next call to next() will be the collation
* element corresponding to the characters at offset.
*
* <p>If offset is in the middle of a contracting character
* sequence, the iterator is adjusted to the start of the
* contracting sequence. This means that getOffset() is not
* guaranteed to return the same value set by this method.
*
     * <p>If the decomposition mode is on, and offset is in the middle
     * of a decomposable range of source text, the iterator may not
     * return a correct result for the next forwards or backwards
     * iteration. The user must ensure that the offset is not in the
     * middle of a decomposable range.
*
* @param newOffset the character offset into the original source string to
* set. Note that this is not an offset into the corresponding
* sequence of collation elements.
*/
    public void setOffset(int newOffset) {
        if (0 < newOffset && newOffset < string_.length()) {
            int offset = newOffset;
            // Move backwards over characters the collator marks as "unsafe"
            // (possibly inside a contraction) to find a restart boundary.
            do {
                char c = string_.charAt(offset);
                if (!rbc_.isUnsafe(c) ||
                        (Character.isHighSurrogate(c) && !rbc_.isUnsafe(string_.codePointAt(offset)))) {
                    break;
                }
                // Back up to before this unsafe character.
                --offset;
            } while (offset > 0);
            if (offset < newOffset) {
                // We might have backed up more than necessary.
                // For example, contractions "ch" and "cu" make both 'h' and 'u' unsafe,
                // but for text "chu" setOffset(2) should remain at 2
                // although we initially back up to offset 0.
                // Find the last safe offset no greater than newOffset by iterating forward.
                int lastSafeOffset = offset;
                do {
                    iter_.resetToOffset(lastSafeOffset);
                    do {
                        // Consume CEs until the iterator advances past lastSafeOffset.
                        iter_.nextCE();
                    } while ((offset = iter_.getOffset()) == lastSafeOffset);
                    if (offset <= newOffset) {
                        lastSafeOffset = offset;
                    }
                } while (offset < newOffset);
                newOffset = lastSafeOffset;
            }
        }
        iter_.resetToOffset(newOffset);
        otherHalf_ = 0;
        dir_ = 1;
    }
/**
* Set a new source string for iteration, and reset the offset
* to the beginning of the text.
*
* @param source the new source string for iteration.
*/
public void setText(String source) {
string_ = source; // TODO: do we need to remember the source string in a field?
CollationIterator newIter;
boolean numeric = rbc_.settings.readOnly().isNumeric();
if (rbc_.settings.readOnly().dontCheckFCD()) {
newIter = new UTF16CollationIterator(rbc_.data, numeric, string_, 0);
} else {
newIter = new FCDUTF16CollationIterator(rbc_.data, numeric, string_, 0);
}
iter_ = newIter;
otherHalf_ = 0;
dir_ = 0;
}
/**
* Set a new source string iterator for iteration, and reset the
* offset to the beginning of the text.
*
* <p>The source iterator's integrity will be preserved since a new copy
* will be created for use.
* @param source the new source string iterator for iteration.
*/
public void setText(UCharacterIterator source) {
string_ = source.getText(); // TODO: do we need to remember the source string in a field?
// Note: In C++, we just setText(source.getText()).
// In Java, we actually operate on a character iterator.
// (The old code apparently did so only for a CharacterIterator;
// for a UCharacterIterator it also just used source.getText()).
// TODO: do we need to remember the cloned iterator in a field?
UCharacterIterator src;
try {
src = (UCharacterIterator) source.clone();
} catch (CloneNotSupportedException e) {
// Fall back to ICU 52 behavior of iterating over the text contents
// of the UCharacterIterator.
setText(source.getText());
return;
}
src.setToStart();
CollationIterator newIter;
boolean numeric = rbc_.settings.readOnly().isNumeric();
if (rbc_.settings.readOnly().dontCheckFCD()) {
newIter = new IterCollationIterator(rbc_.data, numeric, src);
} else {
newIter = new FCDIterCollationIterator(rbc_.data, numeric, src, 0);
}
iter_ = newIter;
otherHalf_ = 0;
dir_ = 0;
}
/**
* Set a new source string iterator for iteration, and reset the
* offset to the beginning of the text.
*
* @param source the new source string iterator for iteration.
*/
public void setText(CharacterIterator source) {
// Note: In C++, we just setText(source.getText()).
// In Java, we actually operate on a character iterator.
// TODO: do we need to remember the iterator in a field?
// TODO: apparently we don't clone a CharacterIterator in Java,
// we only clone the text for a UCharacterIterator?? see the old code in the constructors
UCharacterIterator src = new CharacterIteratorWrapper(source);
src.setToStart();
string_ = src.getText(); // TODO: do we need to remember the source string in a field?
CollationIterator newIter;
boolean numeric = rbc_.settings.readOnly().isNumeric();
if (rbc_.settings.readOnly().dontCheckFCD()) {
newIter = new IterCollationIterator(rbc_.data, numeric, src);
} else {
newIter = new FCDIterCollationIterator(rbc_.data, numeric, src, 0);
}
iter_ = newIter;
otherHalf_ = 0;
dir_ = 0;
}
private static final class MaxExpSink implements ContractionsAndExpansions.CESink {
MaxExpSink(Map<Integer, Integer> h) {
maxExpansions = h;
}
@Override
public void handleCE(long ce) {
}
@Override
public void handleExpansion(long ces[], int start, int length) {
if (length <= 1) {
// We do not need to add single CEs into the map.
return;
}
int count = 0; // number of CE "halves"
for (int i = 0; i < length; ++i) {
count += ceNeedsTwoParts(ces[start + i]) ? 2 : 1;
}
// last "half" of the last CE
long ce = ces[start + length - 1];
long p = ce >>> 32;
int lower32 = (int) ce;
int lastHalf = getSecondHalf(p, lower32);
if (lastHalf == 0) {
lastHalf = getFirstHalf(p, lower32);
assert (lastHalf != 0);
} else {
lastHalf |= 0xc0; // old-style continuation CE
}
Integer oldCount = maxExpansions.get(lastHalf);
if (oldCount == null || count > oldCount) {
maxExpansions.put(lastHalf, count);
}
}
private Map<Integer, Integer> maxExpansions;
}
static final Map<Integer, Integer> computeMaxExpansions(CollationData data) {
Map<Integer, Integer> maxExpansions = new HashMap<>();
MaxExpSink sink = new MaxExpSink(maxExpansions);
new ContractionsAndExpansions(null, null, sink, true).forData(data);
return maxExpansions;
}
/**
* Returns the maximum length of any expansion sequence that ends with
* the specified collation element. If there is no expansion with this
* collation element as the last element, returns 1.
*
* @param ce a collation element returned by previous() or next().
* @return the maximum length of any expansion sequence ending
* with the specified collation element.
*/
    public int getMaxExpansion(int ce) {
        // Delegate to the static lookup using this collator's precomputed table.
        return getMaxExpansion(rbc_.tailoring.maxExpansions, ce);
    }
static int getMaxExpansion(Map<Integer, Integer> maxExpansions, int order) {
if (order == 0) {
return 1;
}
Integer max;
if (maxExpansions != null && (max = maxExpansions.get(order)) != null) {
return max;
}
if ((order & 0xc0) == 0xc0) {
// old-style continuation CE
return 2;
} else {
return 1;
}
}
/** Normalizes dir_=1 (just after setOffset()) to dir_=0 (just after reset()). */
private byte normalizeDir() {
return dir_ == 1 ? 0 : dir_;
}
/**
     * Tests whether the argument object is equal to this CollationElementIterator.
     * Iterators are equal if they use the same RuleBasedCollator,
     * the same source text and have the same current position in iteration.
     * @param that object to test for equality with this
     * CollationElementIterator
*/
@Override
public boolean equals(Object that) {
if (that == this) {
return true;
}
if (that instanceof CollationElementIterator) {
CollationElementIterator thatceiter = (CollationElementIterator) that;
return rbc_.equals(thatceiter.rbc_)
&& otherHalf_ == thatceiter.otherHalf_
&& normalizeDir() == thatceiter.normalizeDir()
&& string_.equals(thatceiter.string_)
&& iter_.equals(thatceiter.iter_);
}
return false;
}
/**
* Mock implementation of hashCode(). This implementation always returns a constant
* value. When Java assertion is enabled, this method triggers an assertion failure.
* @hide deprecated on icu4j-org
*/
    @Override
    public int hashCode() {
        // Deliberately unimplemented: equality is stateful and instances are not
        // meant to be hash keys. Fails fast when assertions are enabled.
        assert false : "hashCode not designed";
        return 42;
    }
/**
* @deprecated This API is ICU internal only.
* @hide deprecated on icu4j-org
* @hide draft / provisional / internal are hidden on OHOS
*/
    @Deprecated
    public RuleBasedCollator getRuleBasedCollator() {
        // Internal accessor for the collator this iterator was created from.
        return rbc_;
    }
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package Entidades;
/**
*
* @author PC
*/
/** Simple value holder for a bank, identified only by its name. */
public class Banco {
    String nombreBanco;

    /** Creates a bank with no name set. */
    public Banco() {
    }

    /**
     * Creates a bank with the given name.
     *
     * @param nombreBanco the bank's name
     */
    public Banco(String nombreBanco) {
        this.nombreBanco = nombreBanco;
    }

    public String getNombreBanco() {
        return nombreBanco;
    }

    public void setNombreBanco(String nombreBanco) {
        this.nombreBanco = nombreBanco;
    }

    @Override
    public String toString() {
        return "Banco{nombreBanco=" + nombreBanco + '}';
    }
}
|
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class Test {

    /**
     * Smoke test: constructing the context verifies that
     * applicationContext.xml parses and all beans can be created.
     */
    @org.junit.Test
    public void run1(){
        // Use the concrete type so the context can be closed; the original
        // never released the container's resources (a leak in larger suites).
        ClassPathXmlApplicationContext ac = new ClassPathXmlApplicationContext("applicationContext.xml");
        ac.close();
    }
}
|
package org.condast.wph.core.def;
import java.util.Date;
public interface IEventLocation extends ILocation{

    /** The kinds of shipping events that can be recorded at a location. */
    public enum EventTypes{
        ACTUAL_TIME_OF_DEPARTURE,
        ESTIMATED_TIME_OF_ARRIVAL;

        /** Returns the conventional abbreviation for this event type. */
        @Override
        public String toString() {
            switch( this ){
            case ACTUAL_TIME_OF_DEPARTURE:
                return "ATD";
            case ESTIMATED_TIME_OF_ARRIVAL:
                return "ETA";
            default:
                // Unreachable: every constant is handled above.
                return super.toString();
            }
        }
    }

    /**
     * Get the time of the event
     * @return the moment the event occurred or is expected
     */
    public Date getTime();

    /**
     * The event type
     * @return the kind of event recorded at this location
     */
    public EventTypes getEvent();
}
|
package io.deephaven.kafka.publish;
import io.deephaven.chunk.attributes.Values;
import io.deephaven.engine.table.Table;
import io.deephaven.engine.table.ColumnSource;
import io.deephaven.chunk.Chunk;
import io.deephaven.engine.table.ChunkSource;
import io.deephaven.chunk.ObjectChunk;
import io.deephaven.engine.table.impl.chunkboxer.ChunkBoxer;
import io.deephaven.engine.rowset.RowSequence;
/**
 * A {@link KeyOrValueSerializer} that reads values straight from a single
 * table column, boxing each chunk of column data into an object chunk.
 */
public class SimpleKeyOrValueSerializer<SERIALIZED_TYPE> implements KeyOrValueSerializer<SERIALIZED_TYPE> {
    // Column the serialized values are read from.
    private final ColumnSource<SERIALIZED_TYPE> source;
    // Kernel that boxes chunks from the column into object chunks.
    private final ChunkBoxer.BoxerKernel boxer;

    /**
     * @param table the table supplying the data
     * @param columnName the column whose values are emitted
     */
    public SimpleKeyOrValueSerializer(Table table, String columnName) {
        source = table.getColumnSource(columnName);
        boxer = ChunkBoxer.getBoxer(source.getChunkType(), PublishToKafka.CHUNK_SIZE);
    }

    @SuppressWarnings({"unchecked", "rawtypes"})
    @Override
    public ObjectChunk<SERIALIZED_TYPE, Values> handleChunk(Context context, RowSequence rowSequence,
            boolean previous) {
        // NOTE(review): 'previous' is ignored — current values are always read
        // via getChunk(); confirm whether previous-value reads are required.
        final SimpleContext simpleContext = (SimpleContext) context;
        final Chunk chunk = source.getChunk(simpleContext.sourceGetContext, rowSequence);
        return boxer.box(chunk);
    }

    @Override
    public Context makeContext(int size) {
        return new SimpleContext(size);
    }

    /** Holds the reusable column get-context for a chunk of rows. */
    private class SimpleContext implements Context {
        private final ChunkSource.GetContext sourceGetContext;

        private SimpleContext(final int size) {
            sourceGetContext = source.makeGetContext(size);
        }

        @Override
        public void close() {
            // Release the column's chunk-read resources.
            sourceGetContext.close();
        }
    }
}
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jdbc.support;
import org.junit.jupiter.api.Test;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.dao.InvalidDataAccessApiUsageException;
import java.util.HashMap;
import java.util.Map;
import static java.util.Arrays.asList;
import static java.util.Collections.*;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatExceptionOfType;
/**
* Tests for {@link KeyHolder} and {@link GeneratedKeyHolder}.
*
* @author Thomas Risberg
* @author Sam Brannen
* @since July 18, 2004
*/
public class KeyHolderTests {

    /** Holder under test; populated afresh by each test method. */
    private final KeyHolder kh = new GeneratedKeyHolder();

    @Test
    public void singleKey() {
        kh.getKeyList().addAll(singletonList(singletonMap("key", 1)));
        assertThat(kh.getKey().intValue()).as("single key should be returned").isEqualTo(1);
    }

    @Test
    public void singleKeyNonNumeric() {
        kh.getKeyList().addAll(singletonList(singletonMap("key", "1")));
        assertThatExceptionOfType(DataRetrievalFailureException.class).isThrownBy(() ->
                kh.getKey().intValue())
            .withMessageStartingWith("The generated key is not of a supported numeric type.");
    }

    @Test
    public void noKeyReturnedInMap() {
        kh.getKeyList().addAll(singletonList(emptyMap()));
        assertThatExceptionOfType(DataRetrievalFailureException.class).isThrownBy(() ->
                kh.getKey())
            .withMessageStartingWith("Unable to retrieve the generated key.");
    }

    @Test
    public void multipleKeys() {
        // Plain map construction instead of double-brace initialization, which
        // creates an anonymous serializable subclass per call site (and made
        // the former @SuppressWarnings("serial") necessary).
        Map<String, Object> m = new HashMap<>();
        m.put("key", 1);
        m.put("seq", 2);
        kh.getKeyList().addAll(singletonList(m));
        assertThat(kh.getKeys().size()).as("two keys should be in the map").isEqualTo(2);
        assertThatExceptionOfType(InvalidDataAccessApiUsageException.class).isThrownBy(() ->
                kh.getKey())
            .withMessageStartingWith("The getKey method should only be used when a single key is returned.");
    }

    @Test
    public void multipleKeyRows() {
        Map<String, Object> m = new HashMap<>();
        m.put("key", 1);
        m.put("seq", 2);
        kh.getKeyList().addAll(asList(m, m));
        assertThat(kh.getKeyList().size()).as("two rows should be in the list").isEqualTo(2);
        assertThatExceptionOfType(InvalidDataAccessApiUsageException.class).isThrownBy(() ->
                kh.getKeys())
            .withMessageStartingWith("The getKeys method should only be used when keys for a single row are returned.");
    }
}
|
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyDescription;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
/**
* GeneToGeneProductRelationship
* <p>
* A gene is transcribed and potentially translated to a gene product
*
*/
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
    "object",
    "relation",
    "subject"
})
public class GeneToGeneProductRelationship {

    /**
     * connects an association to the object of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("object")
    @JsonPropertyDescription("connects an association to the object of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.")
    private String object;
    /**
     * the relationship type by which a subject is connected to an object in an association
     *
     */
    @JsonProperty("relation")
    @JsonPropertyDescription("the relationship type by which a subject is connected to an object in an association")
    private String relation;
    /**
     * connects an association to the subject of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("subject")
    @JsonPropertyDescription("connects an association to the subject of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.")
    private String subject;

    /**
     * connects an association to the object of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("object")
    public String getObject() {
        return object;
    }

    /**
     * connects an association to the object of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("object")
    public void setObject(String object) {
        this.object = object;
    }

    /**
     * the relationship type by which a subject is connected to an object in an association
     *
     */
    @JsonProperty("relation")
    public String getRelation() {
        return relation;
    }

    /**
     * the relationship type by which a subject is connected to an object in an association
     *
     */
    @JsonProperty("relation")
    public void setRelation(String relation) {
        this.relation = relation;
    }

    /**
     * connects an association to the subject of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("subject")
    public String getSubject() {
        return subject;
    }

    /**
     * connects an association to the subject of the association. For example, in a gene-to-phenotype association, the gene is subject and phenotype is object.
     *
     */
    @JsonProperty("subject")
    public void setSubject(String subject) {
        this.subject = subject;
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("object", object).append("relation", relation).append("subject", subject).toString();
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(subject).append(object).append(relation).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Idiomatic negated instanceof instead of "(x instanceof T) == false".
        if (!(other instanceof GeneToGeneProductRelationship)) {
            return false;
        }
        GeneToGeneProductRelationship rhs = ((GeneToGeneProductRelationship) other);
        return new EqualsBuilder().append(subject, rhs.subject).append(object, rhs.object).append(relation, rhs.relation).isEquals();
    }

}
|
package ro.ase.csie.cts.dp.singleton;
public class ConexiuneBD {
String ip;
int port;
String bd;
//referinta catre obiectul unic
//lazy-instantiation
private static ConexiuneBD conexiune = null;
private ConexiuneBD() {
System.out.println("Creare conexiune");
}
private ConexiuneBD(String ip, int port, String bd) {
System.out.println("Creare conexiune");
this.ip = ip;
this.port = port;
this.bd = bd;
}
public static ConexiuneBD getConexiune() {
if(conexiune == null) {
conexiune = new ConexiuneBD();
//preluare valori atribute dintr-un fisier de configurare
conexiune.ip = "127.0.0.1";
conexiune.port = 3306;
conexiune.bd = "licenta";
}
return conexiune;
}
//varianta fara fisier de configurare
//nu este Clean - la apel genereaza ideea ca ai mai multe conexiuni
public static ConexiuneBD getConexiune(String ip, int port, String bd) {
if(conexiune == null) {
conexiune = new ConexiuneBD();
//preluare valori atribute dintr-un fisier de configurare
conexiune.ip = ip;
conexiune.port = port;
conexiune.bd = bd;
}
return conexiune;
}
}
|
/*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.item.inventory.properties;
import org.spongepowered.api.item.inventory.equipment.EquipmentTypeWorn;
import org.spongepowered.api.item.inventory.equipment.EquipmentTypes;
import org.spongepowered.api.util.Coerce;
/**
* Inventory property which allows queries to be constructed for a particular
* worn equipment slot type.
*/
public class ArmorSlotType extends EquipmentSlotType {

    /**
     * Create a new ArmorSlotType property which matches armour slots of the
     * specified type.
     *
     * @param value armour type to match
     */
    public ArmorSlotType(EquipmentTypeWorn value) {
        super(value);
    }

    /**
     * Create a new ArmorSlotType property which matches armour slots of the
     * specified type.
     *
     * @param value armour type to match
     * @param operator logical operator (either EQUAL or NOTEQUAL) to apply
     */
    public ArmorSlotType(EquipmentTypeWorn value, Operator operator) {
        super(value, operator);
    }

    /**
     * Create a new ArmorSlotType property which matches armour slots of the
     * specified type.
     *
     * @param value armour type to match
     * @param operator logical operator (either EQUAL or NOTEQUAL) to apply
     */
    public ArmorSlotType(Object value, Operator operator) {
        // Coerce the raw value into an EquipmentTypeWorn pseudo-enum;
        // EquipmentTypes.WORN is presumably the fallback when the value
        // cannot be matched — confirm against Coerce.toPseudoEnum.
        super(Coerce.<EquipmentTypeWorn>toPseudoEnum(value, EquipmentTypeWorn.class, EquipmentTypeWorn.class, EquipmentTypes.WORN), operator);
    }

    /**
     * Create an ArmourSlotType property which matches ArmourSlotType properties
     * with equal value.
     *
     * @param value Type of worn equipment to match
     * @return new property
     */
    public static ArmorSlotType of(Object value) {
        return new ArmorSlotType(value, Operator.EQUAL);
    }

    /**
     * Create an ArmourSlotType property which matches ArmourSlotType properties
     * with unequal value.
     *
     * @param value Type of worn equipment to match
     * @return new property
     */
    public static ArmorSlotType not(Object value) {
        return new ArmorSlotType(value, Operator.NOTEQUAL);
    }
}
|
/*
* All content copyright Terracotta, Inc., unless otherwise indicated. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
*/
package example03.CronTrigger;
import java.util.Date;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
/**
* <p>
* This is just a simple job that gets fired off many times by example 1
* </p>
*
* @author Bill Kratzer
*/
public class SimpleJob implements Job {

    // Loggers should be constants: static AND final, one per class.
    private static final Logger _log = LoggerFactory.getLogger(SimpleJob.class);

    /**
     * Empty constructor for job initialization
     */
    public SimpleJob() {
    }

    /**
     * <p>
     * Called by the <code>{@link org.quartz.Scheduler}</code> when a
     * <code>{@link org.quartz.Trigger}</code> fires that is associated with
     * the <code>Job</code>.
     * </p>
     *
     * @throws JobExecutionException
     *             if there is an exception while executing the job.
     */
    public void execute(JobExecutionContext context)
        throws JobExecutionException {

        // This job simply prints out its job name and the
        // date and time that it is running.
        JobKey jobKey = context.getJobDetail().getKey();
        // Parameterized logging avoids string concatenation when INFO is disabled.
        _log.info("SimpleJob says: Hello World! -{} executing at {}", jobKey, new Date());
    }

}
|
// Copyright 2013 the original author or authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lichen.migration.testdb;
import lichen.migration.model.SqlType;
import lichen.migration.model.TableDefinition;
import lichen.migration.services.Migration;
import lichen.migration.services.MigrationHelper;
import lichen.migration.services.Options;
import lichen.migration.services.TableCallback;
import javax.inject.Inject;
/**
* @author jcai
*/
public class Migrate_20130722093444_CreateTable implements Migration {
    // Schema-manipulation helper, injected by the migration runner.
    @Inject
    private MigrationHelper _helper;
    // Factory for column/table options (comments, limits, defaults, ...).
    @Inject
    private Options _options;
    @Override
    public void up() throws Throwable {
        final int size = 10;
        // Create a table exercising several column types; the commented-out
        // lines are kept as examples of other supported column kinds.
        _helper.createTable("test_table", new TableCallback() {
            @Override
            public void doInTable(TableDefinition t) throws Throwable {
                t.bigint("bigint", _options.Comment("主键"));
                t.blob("blob");
                t.clob("xp");
                // t.bool("bool");
                // t.charColumn("charColumn");
                // t.column("column_", SqlType.BigintType);
                //t.decimal("decimal_",Op);
                t.integer("int_", _options.Unique());
                // t.smallint("sint_");
                // t.timestamp("ts");
                // t.varbinary("vb");
                t.varchar("vc", _options.Limit(size), _options.Default("'asdf'"));
            }
        }, _options.Comment("测试表"));
        // Exercise column addition and table/column comment updates.
        _helper.addColumn("test_table", "test_col", SqlType.VarcharType, _options.NotNull(), _options.Comment("测试列"));
        _helper.commentTable("test_table", _options.Comment("AAAa啊啊"));
        _helper.commentColumn("test_table", "vc", _options.Comment("列1"));
        _helper.commentColumn("test_table", "blob", _options.Comment("照片"));
        // Create and immediately drop a sequence to verify both operations.
        _helper.createSequence("seq_a",_options.Start(5),_options.Increment(1),_options.MinValue(1),_options.MaxValue(100));
        _helper.dropSequence("seq_a");
    }
    @Override
    public void down() throws Throwable {
        // Reverse of up(): drop the added column, then the table itself.
        _helper.removeColumn("test_table", "vc");
        _helper.dropTable("test_table");
    }
}
|
package nextstep.subway.ui.handler;
import nextstep.subway.exception.ServiceException;
import nextstep.subway.exception.line.LineNotFoundException;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;
@RestControllerAdvice
public class ExceptionAdviser {

    /** Maps missing-line lookups to 404 Not Found. */
    @ExceptionHandler(LineNotFoundException.class)
    public ResponseEntity<Void> lineNotFoundHandler(LineNotFoundException exception) {
        return ResponseEntity.notFound()
            .build();
    }

    /**
     * Maps service-level failures to 400 Bad Request with the error message.
     * The parameter is typed as ServiceException (the registered type) rather
     * than the broader RuntimeException for consistency with the annotation.
     */
    @ExceptionHandler(ServiceException.class)
    public ResponseEntity<ErrorResponse> serviceHandler(ServiceException exception) {
        return ResponseEntity.badRequest()
            .body(new ErrorResponse(exception.getMessage()));
    }
}
|
package com.github.maicmiller;
/** Demo driver exercising the circular list: add, remove, and wrapped get(). */
public class Main {

    public static void main(String[] args) {
        ListaCircular<String> listaCircular = new ListaCircular<>();
        listaCircular.add("c0");
        System.out.println(listaCircular);
        // Remove the element at index 0...
        listaCircular.remove(0);
        System.out.println(listaCircular);
        listaCircular.add("c1");
        System.out.println(listaCircular);
        listaCircular.add("c2");
        listaCircular.add("c3");
        System.out.println(listaCircular);
        System.out.println(listaCircular.get(0));
        System.out.println(listaCircular.get(1));
        System.out.println(listaCircular.get(2));
        System.out.println(listaCircular.get(3)); // Wraps back into the continuous loop...
        System.out.println(listaCircular.get(4)); // Wraps around, following the element order...
        System.out.println("------------------");
        // Indices beyond the size keep cycling through the list (loop = 20)...
        for (int i = 0; i < 20; i++){
            System.out.println(listaCircular.get(i));
        }
    }
}
|
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.fabric8.knative.client.serving.v1;
import io.fabric8.kubernetes.client.Client;
import io.fabric8.kubernetes.client.dsl.AnyNamespaceable;
import io.fabric8.kubernetes.client.dsl.Namespaceable;
import io.fabric8.kubernetes.client.dsl.RequestConfigurable;
/**
 * Knative Serving v1 client that can additionally be scoped to a single
 * namespace, to all namespaces, or configured per request.
 *
 * @param <C> the concrete client type returned by the scoping operations
 */
public interface GenericServingV1Client<C extends Client> extends Client, ServingV1Client,
    Namespaceable<C>,
    AnyNamespaceable<C>,
    RequestConfigurable<C> {
}
|
/** Swing demo: asks for the user's name and shows a welcome dialog. */
public class QualSeuNome02 {

    public static void main(String[] args) {
        /**
         * The program uses a predefined JOptionPane dialog (an input dialog)
         * that lets the user enter data. showInputDialog displays a dialog
         * containing a prompt and a text field, returns the String the user
         * typed, and assigns it to the variable 'nome'.
         */
        // prompt for the user's name
        String nome = javax.swing.JOptionPane.showInputDialog(null, "Qual é o seu nome? ");
        /**
         * String.format works like System.out.printf except that it returns
         * the formatted String instead of printing it; the result is assigned
         * to the variable 'mensagem'.
         */
        // build the welcome message for the given name
        String mensagem = String.format("Bem vindo, %s a programação java!", nome);
        /**
         * Dialog boxes are windows in which programs show important messages.
         * The JOptionPane class (javax.swing) provides prebuilt message
         * dialogs; showMessageDialog displays a dialog containing 'mensagem'.
         */
        // show the welcome message
        javax.swing.JOptionPane.showMessageDialog(null, mensagem);
    } // end main
} // end class
|
/*
* Copyright © 2019 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.plugin.zuora.objects;
import com.google.gson.annotations.SerializedName;
import io.cdap.cdap.api.data.schema.Schema;
import io.cdap.plugin.zuora.restobjects.annotations.ObjectDefinition;
import io.cdap.plugin.zuora.restobjects.annotations.ObjectFieldDefinition;
import io.cdap.plugin.zuora.restobjects.objects.BaseObject;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Generated Zuora REST object "ProxyActiongenerateRequest": the request body
 * for the proxy "generate" action, carrying the list of ZObjects to generate
 * and their type name.
 *
 * Object name: ProxyActiongenerateRequest (ProxyActiongenerateRequest)
 * Related objects:
 **/
@SuppressWarnings("unused")
@ObjectDefinition(
    Name = "ProxyActiongenerateRequest",
    ObjectType = ObjectDefinition.ObjectDefinitionType.NESTED
)
public class ProxyActiongenerateRequest extends BaseObject {
    /**
     * Name: objects (objects), Type: array|ZObject
     * Options (custom, update, select): false, false, false
     **/
    @Nullable
    @SerializedName("objects")
    @ObjectFieldDefinition(FieldType = Schema.Type.ARRAY, NestedClass = "ZObject")
    private List<ZObject> objects;

    /**
     * Name: type (type), Type: string
     * Options (custom, update, select): false, false, false
     **/
    @Nullable
    @SerializedName("type")
    @ObjectFieldDefinition(FieldType = Schema.Type.STRING)
    private String type;

    /**
     * Registers this object's fields with the {@code BaseObject} field map so
     * they are exposed through the plugin's generic record machinery.
     */
    @Override
    public void addFields() {
        addCustomField("objects", objects, List.class);
        addCustomField("type", type, String.class);
    }
}
|
package io.fabric8.api.scr;
import java.util.Map;
/**
 * Strategy for applying a map of configuration properties to an arbitrary
 * target object.
 */
public interface Configurer {

    /**
     * Configures the specified instance with the provided configuration.
     *
     * @param configuration the configuration properties to apply
     * @param target        the target that will receive the configuration
     * @param <T>           the type of the target object
     * @throws Exception if the configuration cannot be applied to the target
     */
    <T> void configure(Map<String, ?> configuration, T target) throws Exception;
}
|
package com.hz.pojo;
import lombok.AllArgsConstructor;
import lombok.Data;
/**
 * Immutable value object representing a single trade: the trader who made it,
 * the year it took place, and its value.
 *
 * <p>Lombok's {@code @Data} generates getters, {@code equals}, {@code hashCode}
 * and {@code toString}; {@code @AllArgsConstructor} generates the constructor
 * for the three final fields (no setters are generated for final fields).</p>
 *
 * @author CHUANQI.DONG
 * @version created 2018-12-02 09:50:38
 */
@Data
@AllArgsConstructor
public class Transaction {
    // Trader who executed this transaction.
    private final Trader trader;
    // Year in which the transaction took place.
    private final int year;
    // Value of the transaction.
    private final int value;
}
|
/*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.event;
import org.gradle.api.Action;
import org.gradle.internal.Cast;
import org.gradle.internal.dispatch.Dispatch;
import org.gradle.internal.dispatch.MethodInvocation;
import org.gradle.internal.dispatch.ReflectionDispatch;
import org.gradle.util.internal.CollectionUtils;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Set;
/**
* An immutable composite {@link org.gradle.internal.dispatch.Dispatch} implementation. Optimized for a small number of elements, and for infrequent modification.
*/
public abstract class BroadcastDispatch<T> extends AbstractBroadcastDispatch<T> {
private BroadcastDispatch(Class<T> type) {
super(type);
}
public static <T> BroadcastDispatch<T> empty(Class<T> type) {
return new EmptyDispatch<T>(type);
}
public Class<T> getType() {
return type;
}
public abstract boolean isEmpty();
public BroadcastDispatch<T> add(Dispatch<MethodInvocation> dispatch) {
return add(dispatch, dispatch);
}
public BroadcastDispatch<T> add(T listener) {
return add(listener, new ReflectionDispatch(listener));
}
public BroadcastDispatch<T> add(String methodName, Action<?> action) {
assertIsMethod(methodName);
return add(action, new ActionInvocationHandler(methodName, Cast.<Action<Object>>uncheckedNonnullCast(action)));
}
abstract BroadcastDispatch<T> add(Object handler, Dispatch<MethodInvocation> dispatch);
private void assertIsMethod(String methodName) {
for (Method method : type.getMethods()) {
if (method.getName().equals(methodName)) {
return;
}
}
throw new IllegalArgumentException(String.format("Method %s() not found for listener type %s.", methodName,
type.getSimpleName()));
}
public abstract BroadcastDispatch<T> remove(Object listener);
public abstract BroadcastDispatch<T> addAll(Collection<? extends T> listeners);
public abstract BroadcastDispatch<T> removeAll(Collection<?> listeners);
public abstract void visitListeners(Action<T> visitor);
private static class ActionInvocationHandler implements Dispatch<MethodInvocation> {
private final String methodName;
private final Action<Object> action;
ActionInvocationHandler(String methodName, Action<Object> action) {
this.methodName = methodName;
this.action = action;
}
@Override
public void dispatch(MethodInvocation message) {
if (message.getMethod().getName().equals(methodName)) {
action.execute(message.getArguments()[0]);
}
}
}
private static class EmptyDispatch<T> extends BroadcastDispatch<T> {
EmptyDispatch(Class<T> type) {
super(type);
}
@Override
public String toString() {
return "<empty>";
}
@Override
public boolean isEmpty() {
return true;
}
@Override
public BroadcastDispatch<T> remove(Object listener) {
return this;
}
@Override
public BroadcastDispatch<T> removeAll(Collection<?> listeners) {
return this;
}
@Override
BroadcastDispatch<T> add(Object handler, Dispatch<MethodInvocation> dispatch) {
return new SingletonDispatch<T>(type, handler, dispatch);
}
@Override
public void visitListeners(Action<T> visitor) {
}
@Override
public BroadcastDispatch<T> addAll(Collection<? extends T> listeners) {
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
for (T listener : listeners) {
SingletonDispatch<T> dispatch = new SingletonDispatch<T>(type, listener, new ReflectionDispatch(listener));
if (!result.contains(dispatch)) {
result.add(dispatch);
}
}
if (result.isEmpty()) {
return this;
}
if (result.size() == 1) {
return result.iterator().next();
}
return new CompositeDispatch<T>(type, result);
}
@Override
public void dispatch(MethodInvocation message) {
}
}
private static class SingletonDispatch<T> extends BroadcastDispatch<T> {
private final Object handler;
private final Dispatch<MethodInvocation> dispatch;
SingletonDispatch(Class<T> type, Object handler, Dispatch<MethodInvocation> dispatch) {
super(type);
this.handler = handler;
this.dispatch = dispatch;
}
@Override
public String toString() {
return handler.toString();
}
@Override
public boolean equals(Object obj) {
SingletonDispatch<T> other = Cast.uncheckedNonnullCast(obj);
return handler == other.handler || handler.equals(other.handler);
}
@Override
public int hashCode() {
return handler.hashCode();
}
@Override
BroadcastDispatch<T> add(Object handler, Dispatch<MethodInvocation> dispatch) {
if (this.handler == handler || this.handler.equals(handler)) {
return this;
}
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
result.add(this);
result.add(new SingletonDispatch<T>(type, handler, dispatch));
return new CompositeDispatch<T>(type, result);
}
@Override
public BroadcastDispatch<T> addAll(Collection<? extends T> listeners) {
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
result.add(this);
for (T listener : listeners) {
if (handler == listener || handler.equals(listener)) {
continue;
}
SingletonDispatch<T> dispatch = new SingletonDispatch<T>(type, listener, new ReflectionDispatch(listener));
if (!result.contains(dispatch)) {
result.add(dispatch);
}
}
if (result.size() == 1) {
return this;
}
return new CompositeDispatch<T>(type, result);
}
@Override
public BroadcastDispatch<T> remove(Object listener) {
if (handler == listener || handler.equals(listener)) {
return new EmptyDispatch<T>(type);
}
return this;
}
@Override
public BroadcastDispatch<T> removeAll(Collection<?> listeners) {
for (Object listener : listeners) {
if (handler == listener || handler.equals(listener)) {
return new EmptyDispatch<T>(type);
}
}
return this;
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public void visitListeners(Action<T> visitor) {
if (getType().isInstance(handler)) {
visitor.execute(getType().cast(handler));
}
}
@Override
public void dispatch(MethodInvocation message) {
dispatch(message, dispatch);
}
}
private static class CompositeDispatch<T> extends BroadcastDispatch<T> {
private final List<SingletonDispatch<T>> dispatchers;
CompositeDispatch(Class<T> type, List<SingletonDispatch<T>> dispatchers) {
super(type);
this.dispatchers = dispatchers;
}
@Override
public String toString() {
return dispatchers.toString();
}
@Override
BroadcastDispatch<T> add(Object handler, Dispatch<MethodInvocation> dispatch) {
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
for (SingletonDispatch<T> listener : dispatchers) {
if (listener.handler == handler || listener.handler.equals(handler)) {
return this;
}
result.add(listener);
}
result.add(new SingletonDispatch<T>(type, handler, dispatch));
return new CompositeDispatch<T>(type, result);
}
@Override
public BroadcastDispatch<T> addAll(Collection<? extends T> listeners) {
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
result.addAll(dispatchers);
for (T listener : listeners) {
SingletonDispatch<T> dispatch = new SingletonDispatch<T>(type, listener, new ReflectionDispatch(listener));
if (!result.contains(dispatch)) {
result.add(dispatch);
}
}
if (result.equals(dispatchers)) {
return this;
}
return new CompositeDispatch<T>(type, result);
}
@Override
public BroadcastDispatch<T> remove(Object listener) {
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
boolean found = false;
for (SingletonDispatch<T> dispatch : dispatchers) {
if (dispatch.handler == listener || dispatch.handler.equals(listener)) {
found = true;
} else {
result.add(dispatch);
}
}
if (!found) {
return this;
}
if (result.size() == 1) {
return result.iterator().next();
}
return new CompositeDispatch<T>(type, result);
}
@Override
public BroadcastDispatch<T> removeAll(Collection<?> listeners) {
Set<Object> listenerList = CollectionUtils.toSet(listeners);
List<SingletonDispatch<T>> result = new ArrayList<SingletonDispatch<T>>();
for (SingletonDispatch<T> dispatch : this.dispatchers) {
if (!listenerList.contains(dispatch.handler)) {
result.add(dispatch);
}
}
if (result.size() == 0) {
return new EmptyDispatch<T>(type);
}
if (result.size() == 1) {
return result.iterator().next();
}
if (result.equals(this.dispatchers)) {
return this;
}
return new CompositeDispatch<T>(type, result);
}
@Override
public void visitListeners(Action<T> visitor) {
for (SingletonDispatch<T> dispatcher : dispatchers) {
dispatcher.visitListeners(visitor);
}
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public void dispatch(MethodInvocation message) {
dispatch(message, dispatchers.iterator());
}
}
}
|
/*
* Copyright ConsenSys AG.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*
*/
package org.enterchain.enter.ethereum.api.jsonrpc.internal.results.tracing.diff;
import java.io.IOException;
import java.util.Optional;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
/**
 * A single "from → to" difference entry of a state diff. Either side may be
 * absent: a missing {@code from} means the value was created, a missing
 * {@code to} means it was deleted.
 */
@JsonSerialize(using = DiffNode.Serializer.class)
public final class DiffNode {
  private final Optional<String> from;
  private final Optional<String> to;

  DiffNode(final String from, final String to) {
    this.from = Optional.ofNullable(from);
    this.to = Optional.ofNullable(to);
  }

  DiffNode(final Optional<String> from, final Optional<String> to) {
    this.from = from;
    this.to = to;
  }

  /**
   * Returns whether the two sides differ.
   *
   * <p>Fixed: the previous implementation called {@code to.get()} inside
   * {@code from.map(...)}, throwing {@code NoSuchElementException} when
   * {@code from} was present but {@code to} was absent (a deletion).
   * Comparing the Optionals directly covers all four presence combinations:
   * both absent → no difference; exactly one absent → difference; both
   * present → compare the wrapped values.</p>
   */
  boolean hasDifference() {
    return !from.equals(to);
  }

  /**
   * Serializes a node as "=" (unchanged), {@code {"*": {"from":..,"to":..}}}
   * (changed), {@code {"-": ..}} (deleted) or {@code {"+": ..}} (created).
   */
  public static class Serializer extends StdSerializer<DiffNode> {
    public Serializer() {
      this(null);
    }

    protected Serializer(final Class<DiffNode> t) {
      super(t);
    }

    @Override
    public void serialize(
        final DiffNode value, final JsonGenerator gen, final SerializerProvider provider)
        throws IOException {
      if (value.from.isPresent()) {
        if (value.to.isPresent()) {
          // Case-insensitive so hex strings differing only in case count as equal.
          if (value.from.get().equalsIgnoreCase(value.to.get())) {
            gen.writeString("=");
          } else {
            gen.writeStartObject();
            gen.writeObjectFieldStart("*");
            gen.writeObjectField("from", value.from.get());
            gen.writeObjectField("to", value.to.get());
            gen.writeEndObject();
            gen.writeEndObject();
          }
        } else {
          gen.writeStartObject();
          gen.writeObjectField("-", value.from.get());
          gen.writeEndObject();
        }
      } else {
        if (value.to.isPresent()) {
          gen.writeStartObject();
          gen.writeObjectField("+", value.to.get());
          gen.writeEndObject();
        } else {
          gen.writeString("=");
        }
      }
    }
  }
}
|
/*
* Copyright 2007 Outerthought bvba and Schaubroeck nv
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.outerj.daisy.sync.test;
import java.util.List;
import junit.framework.TestCase;
import org.outerj.daisy.sync.Attribute;
import org.outerj.daisy.sync.AttributeImpl;
import org.outerj.daisy.sync.Entity;
import org.outerj.daisy.sync.EntityImpl;
import org.outerj.daisy.sync.EntityNotFoundException;
import org.outerj.daisy.sync.SyncState;
import org.outerj.daisy.sync.Synchronizer;
import org.outerj.daisy.sync.SystemState;
import org.outerj.daisy.sync.dao.Locker;
import org.outerj.daisy.sync.dao.test.InMemoryLocker;
import org.outerj.daisy.sync.dao.test.MockExternalEntityDao;
import org.outerj.daisy.sync.dao.test.MockInternalEntityDao;
import org.outerj.daisy.sync.dao.test.MockSyncEntityDao;
import org.outerj.daisy.sync.mapping.MappingConfiguration;
/**
 * JUnit-3 tests for {@link Synchronizer}, using in-memory mock DAOs for the
 * external system, the sync store and the internal (Daisy) store. Each test
 * runs one or more sync passes and asserts the resulting entity/sync states.
 */
public class SynchronizerTest extends TestCase {
    private MappingConfiguration mappingConfiguration;
    private MockExternalEntityDao externalDao;
    private MockSyncEntityDao syncDao;
    private MockInternalEntityDao daisyDao;
    private Synchronizer synchronizer;
    // The single external entity registered in setUp and used by every test.
    private Entity extEntity;
    private Locker locker;

    /**
     * Wires a synchronizer to fresh mock DAOs and registers one external
     * entity ("plainEntity") with two attributes.
     */
    protected void setUp() throws Exception {
        mappingConfiguration = new MappingConfiguration(this.getClass().getClassLoader().getResourceAsStream("mapping-test.xml"), null, null);
        externalDao = new MockExternalEntityDao();
        syncDao = new MockSyncEntityDao();
        locker = new InMemoryLocker();
        daisyDao = new MockInternalEntityDao();
        synchronizer = new Synchronizer(mappingConfiguration, externalDao, syncDao, daisyDao, locker);
        String entityName = "plainEntity";
        long extId = 5579;
        extEntity = new EntityImpl();
        extEntity.setName(entityName);
        extEntity.setInternalName("SimpleDocument");
        extEntity.setExternalId(extId);
        extEntity.addAttribute(new AttributeImpl("attributeOne", "one value here"));
        extEntity.addAttribute(new AttributeImpl("entityName", "this is my name"));
        extEntity.setLanguage("mylang");
        externalDao.addEntity(extEntity);
        super.setUp();
    }

    // only the syncing is tested here not the mapping

    /** A fresh external entity is created in the sync store on first sync. */
    public void testNewEntity() throws EntityNotFoundException{
        // CASE 1
        assertTrue(synchronizer.startSync(true));
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertNotNull(extEntity.getDaisyVariantKey());
        assertEquals(SyncState.SYNC_EXT2DSY, syncEntity.getState());
    }

    /** An external change propagates to both the sync store and Daisy. */
    public void testEntityUpdateExternal() throws EntityNotFoundException{
        // CASE 2, 5, 11
        synchronizer.startSync(true);
        waitForLock();
        assertNotNull(extEntity.getDaisyVariantKey());
        List<String> values = extEntity.getAttributeByExternalName("attributeOne").getValues();
        values.clear();
        values.add("a new value");
        assertNotSame(extEntity, syncDao.getEntity(extEntity.getDaisyVariantKey()));
        synchronizer.startSync(true);
        waitForLock();
        Entity dsyEntity = daisyDao.getEntity(extEntity.getDaisyVariantKey());
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertEquals(extEntity.getAttributeByExternalName("attributeOne").getValues().get(0), syncEntity
            .getAttributeByExternalName("attributeOne").getValues().get(0));
        assertEquals(extEntity.getAttributeByExternalName("attributeOne").getValues().get(0), dsyEntity
            .getAttributeByExternalName("attributeOne").getValues().get(0));
    }

    /** A Daisy-side change is overwritten by the external value (DSY_OVERWRITE). */
    public void testEntityUpdateInternal() throws EntityNotFoundException {
        // CASE 3, 4, 11
        synchronizer.startSync(true);
        waitForLock();
        assertNotNull(extEntity.getDaisyVariantKey());
        Entity dsyEntity = daisyDao.getEntity(extEntity.getDaisyVariantKey());
        List<String> values = dsyEntity.getAttributeByExternalName("attributeOne").getValues();
        values.clear();
        values.add("a new value -- from daisy");
        daisyDao.storeEntity(dsyEntity);
        assertNotSame(dsyEntity, syncDao.getEntity(extEntity.getDaisyVariantKey()));
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertEquals(SyncState.DSY_OVERWRITE, syncEntity.getState());
        // the sync holds on to external values in an overwrite
        assertEquals(syncEntity.getAttributeByExternalName("attributeOne").getValues().get(0), this.extEntity
            .getAttributeByExternalName("attributeOne").getValues().get(0));
    }

    /** Deleting the Daisy document marks only the Daisy side deleted. */
    public void testEntityDeleteInternal() throws EntityNotFoundException {
        // CASE 12
        synchronizer.startSync(true);
        waitForLock();
        assertNotNull(extEntity.getDaisyVariantKey());
        daisyDao.removeEntity(extEntity.getDaisyVariantKey());
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertTrue(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertEquals(SyncState.DSY_OVERWRITE, syncEntity.getState());
    }

    /** Deleting the external entity marks both sides deleted after a sync. */
    public void testEntityDeleteExternal() throws EntityNotFoundException{
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertNotNull(extEntity.getDaisyVariantKey());
        externalDao.getNamedEntities().get(extEntity.getName()).remove(extEntity);
        synchronizer.startSync(true);
        waitForLock();
        syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertTrue(syncEntity.isDaisyDeleted());
        assertTrue(syncEntity.isExternalDeleted());
    }

    /** startSync honours the lock state; the boolean argument forces a sync. */
    public void testStartSync() {
        // LockState : IDLE
        assertFalse(synchronizer.startSync());
        waitForLock();
        assertTrue(synchronizer.startSync(true));
        waitForLock();
        assertTrue(locker.changeLockState(SystemState.IDLE, SystemState.EXT_UPDATE));
        assertFalse(synchronizer.startSync());
        waitForLock();
        assertFalse(synchronizer.startSync(true));
        waitForLock();
        assertTrue(locker.changeLockState( SystemState.EXT_UPDATE, SystemState.AWAITING_SYNC));
        assertTrue(synchronizer.startSync());
        waitForLock();
        assertTrue(synchronizer.startSync(true));
    }

    /** Re-adding a deleted external entity resurrects all three stores. */
    public void testEntityResurrect() throws EntityNotFoundException{
        createResurrectScenario();
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        Entity dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertFalse(dsyEntity.isDaisyDeleted());
    }

    /** A resurrection carrying an external change propagates the new value. */
    public void testEntityResurrectExtChange() throws EntityNotFoundException {
        createResurrectScenario();
        String newValue = "after resurect value";
        Attribute attr = extEntity.getAttributeByExternalName("attributeOne");
        attr.getValues().clear();
        attr.addValue(newValue);
        externalDao.addEntity(extEntity);
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        Entity dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertFalse(dsyEntity.isDaisyDeleted());
        assertEquals(syncEntity.getAttributeByExternalName("attributeOne").getValues().get(0), newValue);
        assertEquals(dsyEntity.getAttributeByExternalName("attributeOne").getValues().get(0), newValue);
    }

    /** A resurrection colliding with a Daisy-side change yields a CONFLICT. */
    public void testEntityResurrectDsyChange() throws EntityNotFoundException {
        createResurrectScenario();
        Entity dsyEntity = daisyDao.getEntity(extEntity.getDaisyVariantKey());
        String newValue = "change daisy value";
        Attribute attr = dsyEntity.getAttributeByExternalName("attributeOne");
        attr.getValues().clear();
        attr.addValue(newValue);
        dsyEntity.setDaisyDeleted(false);
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertEquals(SyncState.CONFLICT, syncEntity.getState());
        assertTrue(syncEntity.isDaisyDeleted()); // this shouldn't change untill the conflict has been resolved
        assertFalse(syncEntity.isExternalDeleted());
        assertFalse(dsyEntity.isDaisyDeleted());
        // the sync holds on to external values in an overwrite
        assertEquals(syncEntity.getAttributeByExternalName("attributeOne").getValues().get(0), this.extEntity
            .getAttributeByExternalName("attributeOne").getValues().get(0));
    }

    /** Un-retiring the Daisy document alongside a resurrection re-syncs ext→dsy. */
    public void testEntityResurrectDsyUnretire() throws EntityNotFoundException{
        createResurrectScenario();
        Entity dsyEntity = daisyDao.getEntity(extEntity.getDaisyVariantKey());
        dsyEntity.setDaisyDeleted(false);
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertEquals(SyncState.SYNC_EXT2DSY, syncEntity.getState());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertFalse(dsyEntity.isDaisyDeleted());
    }

    /** A resurrection recreates a hard-deleted Daisy document. */
    public void testEntityResurrectDsyHardDelete() throws EntityNotFoundException{
        createResurrectScenario();
        // when an entity is resurrected it will bring back the daisy document too.
        daisyDao.removeEntity(extEntity.getDaisyVariantKey());
        try {
            daisyDao.getEntity(extEntity.getDaisyVariantKey());
            fail("this should give an exception since the entity doesn't exist anymore.");
        } catch (EntityNotFoundException e) {
            // do nothing
        }
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        Entity dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertNotNull(daisyDao.getEntity(extEntity.getDaisyVariantKey()));
        assertEquals(SyncState.SYNC_EXT2DSY, syncEntity.getState());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertFalse(dsyEntity.isDaisyDeleted());
    }

    /**
     * Polls until the synchronizer leaves the SYNCING state. startSync runs
     * asynchronously, so every test calls this before asserting results.
     */
    private void waitForLock() {
        try {
            Thread.sleep(100);
            while (locker.getLockState() == SystemState.SYNCING) {
                Thread.sleep(100);
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
            fail("What happend here");
        }
    }

    /**
     * Syncs the entity once, deletes it externally and syncs again, leaving
     * all three stores in the "deleted" state ready to be resurrected.
     */
    private void createResurrectScenario() throws EntityNotFoundException{
        synchronizer.startSync(true);
        waitForLock();
        Entity syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        assertFalse(syncEntity.isDaisyDeleted());
        assertFalse(syncEntity.isExternalDeleted());
        assertNotNull(extEntity.getDaisyVariantKey());
        externalDao.getNamedEntities().get(extEntity.getName()).remove(extEntity);
        synchronizer.startSync(true);
        waitForLock();
        syncEntity = syncDao.getEntity(extEntity.getDaisyVariantKey());
        Entity dsyEntity = daisyDao.getEntity(syncEntity.getDaisyVariantKey());
        assertTrue(syncEntity.isDaisyDeleted());
        assertTrue(syncEntity.isExternalDeleted());
        assertTrue(dsyEntity.isDaisyDeleted());
        externalDao.addEntity(extEntity);
    }
}
|
package org.zhongweixian.cc.service.impl;
import com.alibaba.fastjson.JSON;
import org.cti.cc.entity.CallDetail;
import org.cti.cc.entity.CallDevice;
import org.cti.cc.entity.CallLog;
import org.cti.cc.entity.PushFailLog;
import org.cti.cc.mapper.CallDetailMapper;
import org.cti.cc.mapper.CallDeviceMapper;
import org.cti.cc.mapper.CallLogMapper;
import org.cti.cc.mapper.PushFailLogMapper;
import org.cti.cc.mapper.base.BaseMapper;
import org.cti.cc.po.CallLogPo;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import org.zhongweixian.cc.configration.mq.RabbitConfig;
import org.zhongweixian.cc.service.CallCdrService;
/**
 * Call CDR (call detail record) persistence service. Depending on the
 * {@code call.cdr.mq} switch, records are either written to the database
 * directly through the MyBatis mappers or published to RabbitMQ for
 * asynchronous persistence.
 *
 * Create by caoliang on 2020/10/28
 */
@Component
public class CallCdrServiceImpl extends BaseServiceImpl<CallLog> implements CallCdrService {
    @Autowired
    private CallLogMapper callLogMapper;
    @Autowired
    private CallDetailMapper callDetailMapper;
    @Autowired
    private CallDeviceMapper callDeviceMapper;
    @Autowired
    private PushFailLogMapper pushFailLogMapper;
    @Autowired
    private RabbitTemplate rabbitTemplate;

    // 1 = publish CDRs to RabbitMQ instead of writing to the DB directly; default 0
    @Value("${call.cdr.mq:0}")
    private Integer callCdrMq;

    @Override
    BaseMapper<CallLog> baseMapper() {
        return callLogMapper;
    }

    /**
     * Persists a call device record, via MQ when call.cdr.mq=1, otherwise
     * directly through the mapper. Returns 0 when handed off to MQ.
     */
    @Override
    public int saveCallDevice(CallDevice callDevice) {
        if (callCdrMq == 1) {
            rabbitTemplate.convertAndSend(RabbitConfig.CALL_DEVICE_EXCHANGE, RabbitConfig.CALL_DEVICE_ROUTING, JSON.toJSONString(callDevice));
            return 0;
        }
        return callDeviceMapper.insertSelective(callDevice);
    }

    /** Persists a call detail record directly through the mapper. */
    @Override
    public int saveCallDetail(CallDetail callDetail) {
        return callDetailMapper.insertSelective(callDetail);
    }

    /**
     * Inserts or updates a call log depending on which timestamps are set:
     * answered-but-not-ended and ended-without-answer both insert a new row;
     * otherwise an update by callId is attempted, falling back to insert when
     * no row matched. Returns 0 for null input or when handed off to MQ.
     */
    @Override
    public int saveOrUpdateCallLog(CallLog callLog) {
        if (callLog == null) {
            return 0;
        }
        logger.info("callId:{} , answerTime:{}", callLog.getCallId(), callLog.getAnswerTime());
        if (callCdrMq == 1) {
            rabbitTemplate.convertAndSend(RabbitConfig.CALL_LOG_EXCHANGE, RabbitConfig.CALL_LOG_ROUTING, JSON.toJSONString(callLog));
            return 0;
        }
        if (callLog.getAnswerTime() != null && callLog.getEndTime() == null) {
            // call was answered and is still in progress
            return callLogMapper.insertSelective(callLog);
        }
        if (callLog.getAnswerTime() == null && callLog.getEndTime() != null) {
            // call ended without being answered
            return callLogMapper.insertSelective(callLog);
        }
        // answered call has ended: update the existing row, insert if absent
        int result = callLogMapper.updateByCallId(callLog);
        if (result == 0) {
            result = callLogMapper.insertSelective(callLog);
        }
        return result;
    }

    @Override
    public CallLogPo getCall(Long companyId, Long callId) {
        return callLogMapper.getCall(companyId, callId);
    }

    /** Records a failed push so it can be retried or inspected later. */
    @Override
    public int savePushFailLog(PushFailLog pushFailLog) {
        return pushFailLogMapper.insertSelective(pushFailLog);
    }
}
|
package com.technology.oracle.scheduler.batchrole.client.ui.form.detail;
import static com.technology.oracle.scheduler.batchrole.client.BatchRoleClientConstant.batchRoleText;
import static com.technology.oracle.scheduler.batchrole.shared.field.BatchRoleFieldNames.PRIVILEGE_CODE;
import static com.technology.oracle.scheduler.batchrole.shared.field.BatchRoleFieldNames.ROLE_ID;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.user.client.ui.ScrollPanel;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.technology.jep.jepria.client.ui.form.detail.DetailFormViewImpl;
import com.technology.jep.jepria.client.widget.field.FieldManager;
import com.technology.jep.jepria.client.widget.field.multistate.JepComboBoxField;
/**
 * Detail form view for the batch-role module: a scrollable vertical panel
 * containing two combo-box fields — privilege code and role id — registered
 * with the form's field manager under their shared field names.
 */
public class BatchRoleDetailFormViewImpl extends DetailFormViewImpl {
    public BatchRoleDetailFormViewImpl() {
        super(new FieldManager());
        // Scrollable container filling the whole form area.
        ScrollPanel scrollPanel = new ScrollPanel();
        scrollPanel.setSize("100%", "100%");
        VerticalPanel panel = new VerticalPanel();
        panel.getElement().getStyle().setMarginTop(5, Unit.PX);
        scrollPanel.add(panel);
        JepComboBoxField privilegeCodeComboBoxField = new JepComboBoxField(batchRoleText.batchRole_detail_privilege_code());
        privilegeCodeComboBoxField.setFieldWidth(300);
        JepComboBoxField roleIdComboBoxField = new JepComboBoxField(batchRoleText.batchRole_detail_role_id());
        roleIdComboBoxField.setEmptyText(batchRoleText.batchRole_detail_role_id_emptyText());
        roleIdComboBoxField.setFieldWidth(300);
        panel.add(privilegeCodeComboBoxField);
        panel.add(roleIdComboBoxField);
        setWidget(scrollPanel);
        // Register fields so the framework can bind values by field name.
        fields.put(PRIVILEGE_CODE, privilegeCodeComboBoxField);
        fields.put(ROLE_ID, roleIdComboBoxField);
    }
}
|
package main;
import java.util.Scanner;
public class Principal {
    // Shared console scanner used by the interactive menu.
    static Scanner sc = new Scanner(System.in);
    // Number of airports managed by the program.
    final static int num = 2;
    static AereoPuerto aereoPuerto[]= new AereoPuerto[num];
    // Airport name entered by the user in menu option 3.
    static String nombreAereopuerto;
    // Airport found by buscarAereopuerto for menu option 3.
    static AereoPuerto aero;

    public static void main(String[] args) {
        // Load the sample airport data, then run the interactive menu.
        insertarDataosAereopurto(aereoPuerto);
        menu();
    }
    /**
     * Populates the given array with two hard-coded sample airports (one
     * public, one private), each with two airlines, several flights and a
     * few passengers.
     */
    public static void insertarDataosAereopurto(AereoPuerto aero[]){
        // Airport 0: public airport in Lima with a subsidy of 8,000,000.
        aero[0]=new AereoPuertoPublico("JOAQUIN VALDEZ","Lima","Peru",8000000);
        aero[0].insertarComapñia(new Compañia("AereoPeru"));
        aero[0].insertarComapñia(new Compañia("LATAM"));
        aero[0].getCompañia("AereoPeru").insertarVuelo(new Vuelo("IB20","Lima","CDMX",150.90,150));
        aero[0].getCompañia("AereoPeru").insertarVuelo(new Vuelo("IB21","LIMA","BJS",190.08,120));
        aero[0].getCompañia("LATAM").insertarVuelo(new Vuelo("FC12","Lima","Paris",450.90,180));
        aero[0].getCompañia("AereoPeru").getVuelo("IB20").insertarPasajero(new Pasajero("CHUZ","20384","Mexicana"));
        aero[0].getCompañia("AereoPeru").getVuelo("IB20").insertarPasajero(new Pasajero("Maria","39721","Mexicana"));
        aero[0].getCompañia("LATAM").getVuelo("FC12").insertarPasajero(new Pasajero("Raul","jh162","Español"));
        // Airport 1: private airport in Mexico City with the same airlines/flights.
        aero[1]=new AereoPuertoPrivado("Mexico T1","CDMX","Mexico");
        aero[1].insertarComapñia(new Compañia("AereoPeru"));
        aero[1].insertarComapñia(new Compañia("LATAM"));
        aero[1].getCompañia("AereoPeru").insertarVuelo(new Vuelo("IB20","Lima","CDMX",150.90,150));
        aero[1].getCompañia("AereoPeru").insertarVuelo(new Vuelo("IB21","LIMA","BJS",190.08,120));
        aero[1].getCompañia("LATAM").insertarVuelo(new Vuelo("FC12","Lima","Paris",450.90,180));
        aero[1].getCompañia("AereoPeru").getVuelo("IB20").insertarPasajero(new Pasajero("CHUZ","20384","Mexica"));
        aero[1].getCompañia("AereoPeru").getVuelo("IB20").insertarPasajero(new Pasajero("Maria","39721","Mexicana"));
        aero[1].getCompañia("LATAM").getVuelo("FC12").insertarPasajero(new Pasajero("Raul","jh162","Español"));
    }
    /**
     * Interactive console menu; loops until the user chooses option 6.
     * Options 4 and 5 are not implemented yet.
     */
    public static void menu(){
        int opcion;
        do{
            System.out.println("\t Menu");
            System.out.println("1. Ver aereopuertos gestionados (Publicos o Privados)");
            System.out.println("2. Ver empresas(Privado) o subvencios(Publico");
            System.out.println("3. Lista de compañias de un Aereopuerto");
            System.out.println("4. Lista de vuelos por comapñia");
            System.out.println("5. Listar posibles vuelos de Oriegen a Destino");
            System.out.println("6. Salir");
            System.out.println("Opcion: ");
            opcion=sc.nextInt();
            switch (opcion){
                case 1: mostrarDatosAereopurtos(aereoPuerto); break;
                case 2: mostrarPatrocinio(aereoPuerto); break;
                case 3:
                    // Consume the pending newline before reading the airport name.
                    sc.nextLine();
                    System.out.println("Didige el nombre del aereopuerto: ");
                    nombreAereopuerto=sc.next();
                    aero=buscarAereopuerto(nombreAereopuerto,aereoPuerto);
                    if (aero==null){
                        System.out.println("Aereopuerto no existe");
                    } else {
                        // mostratCompalia(aero);
                    }
                    break;
                case 4: break;
                case 5: break;
                case 6: break;
                default:
                    System.out.println("Error see quivo de opcion de menu");
            }
            System.out.println("");
        } while (opcion!=6);
    }
public static void mostrarDatosAereopurtos(AereoPuerto aereoouertos[]){
for (int i = 0; i <aereoouertos.length ; i++) {
if (aereoouertos[i] instanceof AereoPuertoPrivado ){
System.out.println("Es un aereoPuerto Privado. ");
System.out.println("Nombre: "+aereoouertos[i].getNombre());
System.out.println("Ciudad: "+aereoouertos[i].getCiudad());
System.out.println("Pais: "+aereoouertos[i].getPais());
} else {
System.out.println("Es un aereoPuerto Publico. ");
System.out.println("Nombre: "+aereoouertos[i].getNombre());
System.out.println("Ciudad: "+aereoouertos[i].getCiudad());
System.out.println("Pais: "+aereoouertos[i].getPais());
}
}
}
public static void mostrarPatrocinio(AereoPuerto aereoPuerto[]){
String empresas[];
for (int i = 0; i <aereoPuerto.length ; i++) {
if (aereoPuerto[i] instanceof AereoPuertoPrivado){
System.out.println("Areopurto Privado: "+aereoPuerto[i].getNombre());
empresas=((AereoPuertoPrivado)aereoPuerto[i]).getListaempresas();
System.out.println("Empresas: ");
for (int j = 0; j <empresas.length ; j++) {
System.out.println(empresas[j]);
}
} else{
System.out.println("AereoPuerto Publico: "+aereoPuerto[i].getNombre());
System.out.println("SUbvencion"+((AereoPuertoPublico)aereoPuerto[i]).getSubvencion());
}
System.out.println("");
}
}
public static AereoPuerto buscarAereopuerto(String nombre, AereoPuerto aereoPuerto[]){
boolean encontrado = false;
int i=0;
AereoPuerto aereo = null;
while((!encontrado)&& i<aereoPuerto.length){
if (nombre.equals(aereoPuerto[i].getNombre())){
encontrado=true;
aereo=aereoPuerto[i];
}
i++;
}
return aereo;
}
public static void mostratCompalia(AereoPuerto aereoPuerto){
System.out.println("\n las compañias del aerorpurto: "+aereoPuerto.getNombre());
for (int i = 0; i <aereoPuerto.getNumCompañia() ; i++) {
System.out.println(aereoPuerto.getCompañia(i).getNombre());
}
}
}
|
/**
* <copyright>
*
* Copyright (c) 2010 SAP AG.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
*
* </copyright>
*/
package org.eclipse.dd.di.impl;
import com.google.gwt.user.client.rpc.GwtTransient;
import java.util.Collection;
import java.util.Map;
import org.eclipse.dd.di.DiPackage;
import org.eclipse.dd.di.DiagramElement;
import org.eclipse.dd.di.Plane;
import org.eclipse.dd.di.util.DiValidator;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.BasicDiagnostic;
import org.eclipse.emf.common.util.Diagnostic;
import org.eclipse.emf.common.util.DiagnosticChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.plugin.EcorePlugin;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.EObjectValidator;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Plane</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.eclipse.dd.di.impl.PlaneImpl#getPlaneElement <em>Plane Element</em>}</li>
* </ul>
*
* @generated
*/
public abstract class PlaneImpl extends NodeImpl implements Plane {
	/**
	 * The cached value of the '{@link #getPlaneElement() <em>Plane Element</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * Lazily created on first access; {@code null} until then.
	 * <!-- end-user-doc -->
	 * @see #getPlaneElement()
	 * @generated
	 * @ordered
	 */
	@GwtTransient
	protected EList<DiagramElement> planeElement;
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PlaneImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return DiPackage.Literals.PLANE;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Returns the containment list of diagram elements owned by this plane,
	 * creating it lazily on first call.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public EList<DiagramElement> getPlaneElement() {
		if (planeElement == null) {
			planeElement = new EObjectContainmentEList<DiagramElement>(DiagramElement.class, this,
					DiPackage.PLANE__PLANE_ELEMENT);
		}
		return planeElement;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Generated invariant stub: the guard is {@code if (false)}, so this
	 * validation currently always succeeds until a real condition is supplied.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean plane_element_type(DiagnosticChain diagnostics, Map<Object, Object> context) {
		// TODO: implement this method
		// -> specify the condition that violates the invariant
		// -> verify the details of the diagnostic, including severity and message
		// Ensure that you remove @generated or mark it @generated NOT
		if (false) {
			if (diagnostics != null) {
				diagnostics.add(new BasicDiagnostic(Diagnostic.ERROR, DiValidator.DIAGNOSTIC_SOURCE,
						DiValidator.PLANE__PLANE_ELEMENT_TYPE,
						EcorePlugin.INSTANCE.getString("_UI_GenericInvariant_diagnostic",
								new Object[] { "plane_element_type", EObjectValidator.getObjectLabel(this, context) }),
						new Object[] { this }));
			}
			return false;
		}
		return true;
	}
	/**
	 * <!-- begin-user-doc -->
	 * Removes a contained diagram element from the planeElement list when the
	 * inverse reference is broken.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
		case DiPackage.PLANE__PLANE_ELEMENT:
			return ((InternalEList<?>) getPlaneElement()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective getter for the planeElement feature.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
		case DiPackage.PLANE__PLANE_ELEMENT:
			return getPlaneElement();
		}
		return super.eGet(featureID, resolve, coreType);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective setter: replaces the entire planeElement list contents.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
		case DiPackage.PLANE__PLANE_ELEMENT:
			getPlaneElement().clear();
			getPlaneElement().addAll((Collection<? extends DiagramElement>) newValue);
			return;
		}
		super.eSet(featureID, newValue);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reflective unset: clears the planeElement list.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
		case DiPackage.PLANE__PLANE_ELEMENT:
			getPlaneElement().clear();
			return;
		}
		super.eUnset(featureID);
	}
	/**
	 * <!-- begin-user-doc -->
	 * Reports whether the planeElement feature has been set (non-null and
	 * non-empty) without triggering lazy creation.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
		case DiPackage.PLANE__PLANE_ELEMENT:
			return planeElement != null && !planeElement.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //PlaneImpl
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tika.parser.odf;
import static org.apache.tika.sax.XHTMLContentHandler.XHTML;
import java.io.IOException;
import java.io.InputStream;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Stack;
import javax.xml.namespace.QName;
import org.apache.commons.codec.binary.Base64;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.AttributesImpl;
import org.apache.tika.extractor.EmbeddedDocumentExtractor;
import org.apache.tika.extractor.EmbeddedDocumentUtil;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.metadata.Metadata;
import org.apache.tika.parser.ParseContext;
import org.apache.tika.sax.ElementMappingContentHandler;
import org.apache.tika.sax.XHTMLContentHandler;
import org.apache.tika.utils.StringUtils;
/*
 Handler for the body element of ODT flat files and for the content.xml
 entry of traditional compressed ODT files
 */
class OpenDocumentBodyHandler extends ElementMappingContentHandler {
    public static final String TEXT_NS = "urn:oasis:names:tc:opendocument:xmlns:text:1.0";
    public static final String TABLE_NS = "urn:oasis:names:tc:opendocument:xmlns:table:1.0";
    public static final String STYLE_NS = "urn:oasis:names:tc:opendocument:xmlns:style:1.0";
    public static final String FORMATTING_OBJECTS_NS =
            "urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0";
    public static final String OFFICE_NS = "urn:oasis:names:tc:opendocument:xmlns:office:1.0";
    public static final String SVG_NS = "urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0";
    public static final String PRESENTATION_NS =
            "urn:oasis:names:tc:opendocument:xmlns:presentation:1.0";
    public static final String DRAW_NS = "urn:oasis:names:tc:opendocument:xmlns:drawing:1.0";
    public static final String XLINK_NS = "http://www.w3.org/1999/xlink";
    protected static final char[] TAB = new char[]{'\t'};
    private static final String BINARY_DATA = "binary-data";
    private static final Attributes EMPTY_ATTRIBUTES = new AttributesImpl();
    private static final NullListStyle NULL_LIST_STYLE = new NullListStyle();
    /**
     * Mappings between ODF tag names and XHTML tag names
     * (including attributes). All other tag names/attributes are ignored
     * and left out from event stream.
     */
    private static final HashMap<QName, TargetElement> MAPPINGS =
            new HashMap<>();
    private static final char[] SPACE = new char[]{' '};
    private static final String CLASS = "class";
    private static final Attributes ANNOTATION_ATTRIBUTES = buildAttributes(CLASS, "annotation");
    private static final Attributes NOTE_ATTRIBUTES = buildAttributes(CLASS, "note");
    private static final Attributes NOTES_ATTRIBUTES = buildAttributes(CLASS, "notes");
    static {
        // general mappings of text:-tags
        MAPPINGS.put(new QName(TEXT_NS, "p"), new TargetElement(XHTML, "p"));
        // text:h-tags are mapped specifically in startElement/endElement
        MAPPINGS.put(new QName(TEXT_NS, "line-break"), new TargetElement(XHTML, "br"));
        MAPPINGS.put(new QName(TEXT_NS, "list-item"), new TargetElement(XHTML, "li"));
        MAPPINGS.put(new QName(TEXT_NS, "note"), new TargetElement(XHTML, "span"));
        MAPPINGS.put(new QName(OFFICE_NS, "annotation"), new TargetElement(XHTML,
                "span"));
        MAPPINGS.put(new QName(PRESENTATION_NS, "notes"), new TargetElement(XHTML,
                "span"));
        MAPPINGS.put(new QName(DRAW_NS, "object"), new TargetElement(XHTML,
                "object"));
        MAPPINGS.put(new QName(DRAW_NS, "text-box"), new TargetElement(XHTML, "div"));
        MAPPINGS.put(new QName(SVG_NS, "title"), new TargetElement(XHTML, "span"));
        MAPPINGS.put(new QName(SVG_NS, "desc"), new TargetElement(XHTML, "span"));
        MAPPINGS.put(new QName(TEXT_NS, "span"), new TargetElement(XHTML, "span"));
        final HashMap<QName, QName> aAttsMapping = new HashMap<>();
        aAttsMapping.put(new QName(XLINK_NS, "href"), new QName("href"));
        aAttsMapping.put(new QName(XLINK_NS, "title"), new QName("title"));
        MAPPINGS.put(new QName(TEXT_NS, "a"), new TargetElement(XHTML, "a",
                aAttsMapping));
        MAPPINGS.put(new QName(DRAW_NS, "a"), new TargetElement(XHTML, "a",
                aAttsMapping));
        // create HTML tables from table:-tags
        MAPPINGS.put(new QName(TABLE_NS, "table"), new TargetElement(XHTML, "table"));
        // repeating of rows is ignored; for columns, see below!
        MAPPINGS.put(new QName(TABLE_NS, "table-row"), new TargetElement(XHTML, "tr"));
        // special mapping for rowspan/colspan attributes
        final HashMap<QName, QName> tableCellAttsMapping = new HashMap<>();
        tableCellAttsMapping
                .put(new QName(TABLE_NS, "number-columns-spanned"), new QName("colspan"));
        tableCellAttsMapping.put(new QName(TABLE_NS, "number-rows-spanned"), new QName("rowspan"));
        /* TODO: The following is not correct, the cell should be repeated not spanned!
         * Code generates a HTML cell, spanning all repeated columns, to make the cell look correct.
         * Problems may occur when both spanning and repeating is given, which is not allowed by
         * spec.
         * Cell spanning instead of repeating is not a problem, because OpenOffice uses it
         * only for empty cells.
         */
        tableCellAttsMapping
                .put(new QName(TABLE_NS, "number-columns-repeated"), new QName("colspan"));
        MAPPINGS.put(new QName(TABLE_NS, "table-cell"),
                new TargetElement(XHTML, "td", tableCellAttsMapping));
    }
    // Downstream handler that receives the mapped XHTML events.
    private final ContentHandler handler;
    private final ParseContext parseContext;
    // Bit i is set when the element at depth i is a text node whose character
    // content should be forwarded; see isTextNode() and characters().
    private final BitSet textNodeStack = new BitSet();
    //have we written the start style tags
    //yet for the current text style
    boolean hasWrittenStartStyleTags = false;
    //if we're in a binary-data tag
    boolean inBinaryData = false;
    // Lazily initialized in processBinaryData().
    private EmbeddedDocumentExtractor embeddedDocumentExtractor;
    // Accumulates base64 characters while inBinaryData is true.
    private StringBuilder base64BinaryDataBuffer = new StringBuilder();
    // Current element nesting depth; also marks the top of textNodeStack.
    private int nodeDepth = 0;
    // > 0 while inside a subtree whose entire content is filtered out
    // (style templates, covered table cells); see needsCompleteFiltering().
    private int completelyFiltered = 0;
    // XHTML heading tag names (h1..h6) for currently open text:h elements.
    private Stack<String> headingStack = new Stack<>();
    private Map<String, TextStyle> paragraphTextStyleMap = new HashMap<>();
    private Map<String, TextStyle> textStyleMap = new HashMap<>();
    private Map<String, ListStyle> listStyleMap = new HashMap<>();
    private String currParagraphStyleName; //paragraph style name
    private TextStyle currTextStyle; //this is the text style for particular spans/paragraphs
    private String currTextStyleName;
    private Stack<ListStyle> listStyleStack = new Stack<>();
    private ListStyle listStyle;
    // True if we are currently in the named style:
    private boolean curUnderlined;
    private boolean curBold;
    private boolean curItalic;
    // Nesting depth of text:p elements; only the outermost opens a real <p>.
    private int pDepth = 0;
    OpenDocumentBodyHandler(ContentHandler handler, ParseContext parseContext) {
        super(handler, MAPPINGS);
        this.handler = handler;
        this.parseContext = parseContext;
    }
    // Builds a single-attribute Attributes instance (used for class="...").
    private static Attributes buildAttributes(String key, String value) {
        AttributesImpl attrs = new AttributesImpl();
        attrs.addAttribute("", key, key, "CDATA", value);
        return attrs;
    }
    @Override
    public void characters(char[] ch, int start, int length) throws SAXException {
        // While inside office:binary-data, buffer the base64 payload instead
        // of forwarding it as text.
        if (inBinaryData) {
            base64BinaryDataBuffer.append(ch, start, length);
            return;
        }
        // only forward content of tags from text:-namespace
        if (completelyFiltered == 0 && nodeDepth > 0 && textNodeStack.get(nodeDepth - 1)) {
            // Open any pending bold/italic/underline tags before emitting text.
            if (!hasWrittenStartStyleTags) {
                updateStyleTags();
                hasWrittenStartStyleTags = true;
            }
            super.characters(ch, start, length);
        }
    }
    // helper for checking tags which need complete filtering
    // (with sub-tags)
    private boolean needsCompleteFiltering(String namespaceURI, String localName) {
        if (TEXT_NS.equals(namespaceURI)) {
            return localName.endsWith("-template") || localName.endsWith("-style");
        }
        return TABLE_NS.equals(namespaceURI) && "covered-table-cell".equals(localName);
    }
    //<p> can appear inside comments and other things that are already inside <p>
    //we need to track our pDepth and only output <p> if we're at the main level
    // map the heading level to <hX> HTML tags
    private String getXHTMLHeaderTagName(Attributes atts) {
        String depthStr = atts.getValue(TEXT_NS, "outline-level");
        if (depthStr == null) {
            return "h1";
        }
        // Clamp the outline level into the valid h1..h6 range.
        int depth = Integer.parseInt(depthStr);
        if (depth >= 6) {
            return "h6";
        } else if (depth <= 1) {
            return "h1";
        } else {
            return "h" + depth;
        }
    }
    /**
     * Check if a node is a text node
     */
    private boolean isTextNode(String namespaceURI, String localName) {
        if (TEXT_NS.equals(namespaceURI) && !localName.equals("page-number") &&
                !localName.equals("page-count")) {
            return true;
        }
        if (SVG_NS.equals(namespaceURI)) {
            return "title".equals(localName) || "desc".equals(localName);
        }
        return false;
    }
    // Opens a <ul>/<ol> chosen by the named list style (falls back to <ul>
    // via NULL_LIST_STYLE when the style is unknown) and pushes it so that
    // endList() closes the matching tag.
    private void startList(String name) throws SAXException {
        ListStyle style = null;
        if (name == null || ! listStyleMap.containsKey(name)) {
            style = NULL_LIST_STYLE;
        } else {
            style = listStyleMap.get(name);
        }
        String elementName = style.getTag();
        listStyleStack.push(style);
        handler.startElement(XHTML, elementName, elementName, EMPTY_ATTRIBUTES);
    }
    // Closes the list element opened by the matching startList() call.
    private void endList() throws SAXException {
        String elementName = "ul";
        if (! listStyleStack.isEmpty()) {
            ListStyle style = listStyleStack.pop();
            elementName = style != null ? style.getTag() : "ul";
        }
        handler.endElement(XHTML, elementName, elementName);
    }
    // Activates the named text style for the span; actual style tags are
    // written lazily by characters()/updateStyleTags().
    private void startSpan(String name) throws SAXException {
        if (name == null) {
            return;
        }
        currTextStyle = textStyleMap.get(name);
        hasWrittenStartStyleTags = false;
    }
    // Opens a real <p> only at the outermost text:p level; nested text:p
    // elements are flattened into a single space.
    private void startParagraph(String styleName) throws SAXException {
        if (pDepth == 0) {
            handler.startElement(XHTML, "p", "p", EMPTY_ATTRIBUTES);
            if (styleName != null) {
                currTextStyle = paragraphTextStyleMap.get(styleName);
            }
            hasWrittenStartStyleTags = false;
        } else {
            handler.characters(SPACE, 0, SPACE.length);
        }
        pDepth++;
    }
    // Mirror of startParagraph(): closes styles, then the outermost <p>.
    private void endParagraph() throws SAXException {
        closeStyleTags();
        if (pDepth == 1) {
            handler.endElement(XHTML, "p", "p");
        } else {
            handler.characters(SPACE, 0, SPACE.length);
        }
        pDepth--;
    }
    // Reconciles the currently open <b>/<i>/<u> tags with currTextStyle,
    // enforcing the nesting order b > i > u by closing inner tags first.
    private void updateStyleTags() throws SAXException {
        if (currTextStyle == null) {
            closeStyleTags();
            return;
        }
        if (currTextStyle.bold != curBold) {
            // Enforce nesting -- must close s and i tags
            if (curUnderlined) {
                handler.endElement(XHTML, "u", "u");
                curUnderlined = false;
            }
            if (curItalic) {
                handler.endElement(XHTML, "i", "i");
                curItalic = false;
            }
            if (currTextStyle.bold) {
                handler.startElement(XHTML, "b", "b", EMPTY_ATTRIBUTES);
            } else {
                handler.endElement(XHTML, "b", "b");
            }
            curBold = currTextStyle.bold;
        }
        if (currTextStyle.italic != curItalic) {
            // Enforce nesting -- must close s tag
            if (curUnderlined) {
                handler.endElement(XHTML, "u", "u");
                curUnderlined = false;
            }
            if (currTextStyle.italic) {
                handler.startElement(XHTML, "i", "i", EMPTY_ATTRIBUTES);
            } else {
                handler.endElement(XHTML, "i", "i");
            }
            curItalic = currTextStyle.italic;
        }
        if (currTextStyle.underlined != curUnderlined) {
            if (currTextStyle.underlined) {
                handler.startElement(XHTML, "u", "u", EMPTY_ATTRIBUTES);
            } else {
                handler.endElement(XHTML, "u", "u");
            }
            curUnderlined = currTextStyle.underlined;
        }
    }
    private void endSpan() throws SAXException {
        updateStyleTags();
    }
    private void closeStyleTags() throws SAXException {
        // Close any still open style tags
        if (curUnderlined) {
            handler.endElement(XHTML, "u", "u");
            curUnderlined = false;
        }
        if (curItalic) {
            handler.endElement(XHTML, "i", "i");
            curItalic = false;
        }
        if (curBold) {
            handler.endElement(XHTML, "b", "b");
            curBold = false;
        }
        currTextStyle = null;
        hasWrittenStartStyleTags = false;
    }
    @Override
    public void startElement(String namespaceURI, String localName, String qName, Attributes attrs)
            throws SAXException {
        // Inline images: emit an <img> referencing the embedded resource.
        if (DRAW_NS.equals(namespaceURI) && "image".equals(localName)) {
            String link = attrs.getValue(XLINK_NS, "href");
            AttributesImpl attr = new AttributesImpl();
            if (!StringUtils.isEmpty(link)) {
                attr.addAttribute("", "src", "src", "CDATA", "embedded:" + link);
            }
            handler.startElement(XHTMLContentHandler.XHTML, "img", "img", attr);
            handler.endElement(XHTMLContentHandler.XHTML, "img", "img");
        }
        if (BINARY_DATA.equals(localName)) {
            inBinaryData = true;
            return;
        }
        // keep track of current node type. If it is a text node,
        // a bit at the current depth its set in textNodeStack.
        // characters() checks the top bit to determine, if the
        // actual node is a text node to print out nodeDepth contains
        // the depth of the current node and also marks top of stack.
        assert nodeDepth >= 0;
        // Set styles
        if (STYLE_NS.equals(namespaceURI) && "style".equals(localName)) {
            String family = attrs.getValue(STYLE_NS, "family");
            if ("text".equals(family)) {
                currTextStyle = new TextStyle();
                currTextStyleName = attrs.getValue(STYLE_NS, "name");
            } else if ("paragraph".equals(family)) {
                currTextStyle = new TextStyle();
                currParagraphStyleName = attrs.getValue(STYLE_NS, "name");
            }
        } else if (TEXT_NS.equals(namespaceURI) && "list-style".equals(localName)) {
            listStyle = new ListStyle();
            String name = attrs.getValue(STYLE_NS, "name");
            listStyleMap.put(name, listStyle);
        } else if (currTextStyle != null && STYLE_NS.equals(namespaceURI) &&
                "text-properties".equals(localName)) {
            String fontStyle = attrs.getValue(FORMATTING_OBJECTS_NS, "font-style");
            if ("italic".equals(fontStyle) || "oblique".equals(fontStyle)) {
                currTextStyle.italic = true;
            }
            String fontWeight = attrs.getValue(FORMATTING_OBJECTS_NS, "font-weight");
            // Numeric weights above 500 are treated as bold (CSS convention).
            if ("bold".equals(fontWeight) || "bolder".equals(fontWeight) ||
                    (fontWeight != null && Character.isDigit(fontWeight.charAt(0)) &&
                            Integer.parseInt(fontWeight) > 500)) {
                currTextStyle.bold = true;
            }
            String underlineStyle = attrs.getValue(STYLE_NS, "text-underline-style");
            if (underlineStyle != null && !underlineStyle.equals("none")) {
                currTextStyle.underlined = true;
            }
        } else if (listStyle != null && TEXT_NS.equals(namespaceURI)) {
            if ("list-level-style-bullet".equals(localName)) {
                listStyle.ordered = false;
            } else if ("list-level-style-number".equals(localName)) {
                listStyle.ordered = true;
            }
        }
        textNodeStack.set(nodeDepth++, isTextNode(namespaceURI, localName));
        // filter *all* content of some tags
        assert completelyFiltered >= 0;
        if (needsCompleteFiltering(namespaceURI, localName)) {
            completelyFiltered++;
        }
        // call next handler if no filtering
        if (completelyFiltered == 0) {
            // special handling of text:h, that are directly passed
            // to incoming handler
            if (TEXT_NS.equals(namespaceURI) && "h".equals(localName)) {
                final String el = headingStack.push(getXHTMLHeaderTagName(attrs));
                handler.startElement(XHTMLContentHandler.XHTML, el, el, EMPTY_ATTRIBUTES);
            } else if (TEXT_NS.equals(namespaceURI) && "list".equals(localName)) {
                startList(attrs.getValue(TEXT_NS, "style-name"));
            } else if (TEXT_NS.equals(namespaceURI) && "span".equals(localName)) {
                startSpan(attrs.getValue(TEXT_NS, "style-name"));
            } else if (TEXT_NS.equals(namespaceURI) && "p".equals(localName)) {
                startParagraph(attrs.getValue(TEXT_NS, "style-name"));
            } else if (TEXT_NS.equals(namespaceURI) && "s".equals(localName)) {
                handler.characters(SPACE, 0, 1);
            } else if ("annotation".equals(localName)) {
                closeStyleTags();
                handler.startElement(XHTML, "p", "p", ANNOTATION_ATTRIBUTES);
            } else if ("note".equals(localName)) {
                closeStyleTags();
                handler.startElement(XHTML, "p", "p", NOTE_ATTRIBUTES);
            } else if ("notes".equals(localName)) {
                closeStyleTags();
                handler.startElement(XHTML, "p", "p", NOTES_ATTRIBUTES);
            } else {
                super.startElement(namespaceURI, localName, qName, attrs);
            }
        }
    }
    @Override
    public void endElement(String namespaceURI, String localName, String qName)
            throws SAXException {
        // End of office:binary-data: decode the buffered base64 payload and
        // hand it to the embedded-document extractor.
        if (BINARY_DATA.equals(localName)) {
            inBinaryData = false;
            try {
                processBinaryData();
            } catch (IOException e) {
                throw new SAXException(e);
            }
            return;
        }
        // Register the completed style definition under its name.
        if (STYLE_NS.equals(namespaceURI) && "style".equals(localName)) {
            if (currTextStyle != null && currTextStyleName != null) {
                textStyleMap.put(currTextStyleName, currTextStyle);
                currTextStyleName = null;
                currTextStyle = null;
            } else if (currTextStyle != null && currParagraphStyleName != null) {
                paragraphTextStyleMap.put(currParagraphStyleName, currTextStyle);
                currParagraphStyleName = null;
                currTextStyle = null;
            }
        } else if (TEXT_NS.equals(namespaceURI) && "list-style".equals(localName)) {
            listStyle = null;
        }
        // call next handler if no filtering
        if (completelyFiltered == 0) {
            // special handling of text:h, that are directly passed
            // to incoming handler
            if (TEXT_NS.equals(namespaceURI) && "h".equals(localName)) {
                final String el = headingStack.pop();
                closeStyleTags();
                handler.endElement(namespaceURI, el, el);
            } else if (TEXT_NS.equals(namespaceURI) && "list".equals(localName)) {
                endList();
            } else if (TEXT_NS.equals(namespaceURI) && "span".equals(localName)) {
                currTextStyle = null;
                hasWrittenStartStyleTags = false;
            } else if (TEXT_NS.equals(namespaceURI) && "p".equals(localName)) {
                endParagraph();
            } else if ("annotation".equals(localName) || "note".equals(localName) ||
                    "notes".equals(localName)) {
                closeStyleTags();
                handler.endElement(namespaceURI, "p", "p");
            } else if ("a".equals(localName)) {
                closeStyleTags();
                super.endElement(namespaceURI, localName, qName);
            } else {
                super.endElement(namespaceURI, localName, qName);
            }
            // special handling of tabulators
            if (TEXT_NS.equals(namespaceURI) &&
                    ("tab-stop".equals(localName) || "tab".equals(localName))) {
                this.characters(TAB, 0, TAB.length);
            }
        }
        // revert filter for *all* content of some tags
        if (needsCompleteFiltering(namespaceURI, localName)) {
            completelyFiltered--;
        }
        assert completelyFiltered >= 0;
        // reduce current node depth
        nodeDepth--;
        assert nodeDepth >= 0;
    }
    // Decodes the buffered base64 data and forwards it to the (lazily
    // obtained) embedded-document extractor as a nested document.
    private void processBinaryData() throws IOException, SAXException {
        //TODO: figure out whether we're in an inline image or a regular
        //attachment and add that info to the embedded metadata
        byte[] bytes = Base64.decodeBase64(base64BinaryDataBuffer.toString());
        //clear state before parsing
        base64BinaryDataBuffer.setLength(0);
        inBinaryData = false;
        if (embeddedDocumentExtractor == null) {
            embeddedDocumentExtractor =
                    EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(parseContext);
        }
        Metadata embeddedMetadata = new Metadata();
        if (embeddedDocumentExtractor.shouldParseEmbedded(embeddedMetadata)) {
            try (InputStream is = TikaInputStream.get(bytes)) {
                embeddedDocumentExtractor.parseEmbedded(is, handler, embeddedMetadata, false);
            }
        }
    }
    @Override
    public void startPrefixMapping(String prefix, String uri) {
        // remove prefix mappings as they should not occur in XHTML
    }
    @Override
    public void endPrefixMapping(String prefix) {
        // remove prefix mappings as they should not occur in XHTML
    }
    // Marker interface for the style records below.
    private interface Style {
    }
    // Bold/italic/underline flags parsed from a style:text-properties element.
    private static class TextStyle implements Style {
        public boolean italic;
        public boolean bold;
        public boolean underlined;
        @Override
        public String toString() {
            return "TextStyle{" + "italic=" + italic + ", bold=" + bold + ", underlined=" +
                    underlined + '}';
        }
    }
    // Ordered/unordered flag for a text:list-style definition.
    private static class ListStyle implements Style {
        public boolean ordered;
        public String getTag() {
            return ordered ? "ol" : "ul";
        }
    }
    // Fallback style used when a list references an unknown style name.
    private static class NullListStyle extends ListStyle {
        NullListStyle() {
            ordered = false;
        }
        @Override
        public String getTag() {
            return "ul";
        }
    }
}
|
package com.example.mytips.login;
import android.content.Intent;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.util.Patterns;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.example.mytips.MainActivity;
import com.example.mytips.R;
import com.example.mytips.setting.SettingActivity;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
import java.util.regex.Pattern;
/**
 * Email/password login screen backed by Firebase Authentication.
 * Handles login, navigation to registration, and a settings shortcut.
 */
public class LoginActivity extends AppCompatActivity implements View.OnClickListener {
    private static final String TAG="LoginActivity";
    // Spinner + "please wait" label shown while the sign-in request is in flight.
    private ProgressBar mProgressBar;
    private TextView textWait;
    private TextView register;
    private EditText inputAccount;
    private EditText inputPassword;
    private Button loginButton;
    private ImageButton settingButton;
    private FirebaseAuth mAuth;
    //private MainActivity mainActivity;
    public LoginActivity() {
    }
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_login);
        mAuth=FirebaseAuth.getInstance();
        // Bind views and wire click listeners.
        mProgressBar=this.findViewById(R.id.activity_login_progress_bar);
        textWait=this.findViewById(R.id.activity_login_text_wait);
        register=this.findViewById(R.id.activity_login_text_register);
        register.setOnClickListener(this);
        inputAccount=this.findViewById(R.id.activity_login_edit_text_email);
        inputPassword=this.findViewById(R.id.activity_login_edit_text_password);
        loginButton=this.findViewById(R.id.activity_login_button);
        loginButton.setOnClickListener(this);
        settingButton=this.findViewById(R.id.activity_login_setting_button);
        initSettingButton();
        // Progress indicators start hidden; they are shown during sign-in.
        progressBarDisappear();
    }
    @Override
    protected void onStart() {
        super.onStart();
        // Check if user is signed in (non-null) and update UI accordingly.
        FirebaseUser currentUser = mAuth.getCurrentUser();
        //If the user is logged in, navigate to message fragment
        //updateUIForOnStart(currentUser);
    }
    // Hides the progress spinner and its accompanying label.
    private void progressBarDisappear(){
        mProgressBar.setVisibility(View.GONE);
        textWait.setVisibility(View.GONE);
    }
    // The settings button opens SettingActivity.
    private void initSettingButton(){
        settingButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent mIntent=new Intent(getApplicationContext(), SettingActivity.class);
                startActivity(mIntent);
            }
        });
    }
    // Returns true when the account string is a syntactically valid email.
    private Boolean isAccountValid(String account){
        Pattern emailPattern= Patterns.EMAIL_ADDRESS;
        return emailPattern.matcher(account).matches();
    }
    //If the user is logged in and email is verified, navigate to message fragment.
    //If the user is logged in but email is not verified, log out.
    private void updateUI(FirebaseUser firebaseUser){
        if(firebaseUser!=null){
            if(firebaseUser.isEmailVerified()) {
                Intent mIntent=new Intent(this,MainActivity.class);
                // Clear the back stack above MainActivity so Back does not
                // return to the login screen.
                mIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                startActivity(mIntent);
            } else {
                // Unverified accounts are signed out again immediately.
                mAuth.signOut();
                Toast.makeText(getApplicationContext(), "Please verify your email", Toast.LENGTH_SHORT).show();
            }
        }
    }
    @Override
    public void onClick(View v) {
        switch (v.getId()){
            case R.id.activity_login_button:
                String account=inputAccount.getText().toString();
                String password=inputPassword.getText().toString();
                // Validate locally before hitting the network.
                if(TextUtils.isEmpty(account)||!isAccountValid(account)){
                    Toast.makeText(getApplicationContext(),R.string.invalid_email_format_reminder,Toast.LENGTH_SHORT).show();
                }else if ((TextUtils.isEmpty(password))){
                    Toast.makeText(getApplicationContext(),R.string.input_password_reminder,Toast.LENGTH_SHORT).show();
                } else {
                    // Show progress while the async sign-in runs.
                    mProgressBar.setVisibility(View.VISIBLE);
                    textWait.setVisibility(View.VISIBLE);
                    mAuth.signInWithEmailAndPassword(account, password)
                            .addOnCompleteListener(new OnCompleteListener<AuthResult>() {
                                @Override
                                public void onComplete(@NonNull Task<AuthResult> task) {
                                    if (task.isSuccessful()) {
                                        // Sign in success, update UI with the signed-in user's information
                                        Log.d(TAG, "signInWithEmail:success");
                                        mProgressBar.setVisibility(View.GONE);
                                        textWait.setVisibility(View.GONE);
                                        FirebaseUser user = mAuth.getCurrentUser();
                                        updateUI(user);
                                    } else {
                                        // If sign in fails, display a message to the user.
                                        Log.w(TAG, "signInWithEmail:failure", task.getException());
                                        Toast.makeText(getApplicationContext(), "Authentication failed.",
                                                Toast.LENGTH_SHORT).show();
                                        mProgressBar.setVisibility(View.GONE);
                                        textWait.setVisibility(View.GONE);
                                    }
                                }
                            });
                }
                break;
            case R.id.activity_login_text_register:
                Intent intent=new Intent(this, RegisterActivity.class);
                this.startActivity(intent);
        }
    }
    /*private void updateUIForOnStart(FirebaseUser firebaseUser){
        if(firebaseUser!=null){
            if(firebaseUser.isEmailVerified()) {
                Intent mIntent=new Intent(this,MainActivity.class);
                startActivity(mIntent);
            } else {
                mAuth.signOut();
            }
        }
    }*/
}
|
/*
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.consol.citrus.dsl.builder;
/**
 * Callback interface that lets client code apply additional configuration to a
 * partially constructed test action builder before it is used.
 *
 * @param <T> the concrete builder type handed to {@link #configure(AbstractTestActionBuilder)}
 * @author Christoph Deppisch
 * @since 2.3
 */
public interface BuilderSupport<T extends AbstractTestActionBuilder> {
    /**
     * Apply configuration to the supplied builder.
     *
     * @param builder the builder instance to configure
     */
    void configure(T builder);
}
|
// This file is licensed under the Elastic License 2.0. Copyright 2021-present, StarRocks Limited.
package com.starrocks.sql.optimizer.rule.transformation;
import com.google.common.collect.Lists;
import com.starrocks.sql.optimizer.OptExpression;
import com.starrocks.sql.optimizer.OptimizerContext;
import com.starrocks.sql.optimizer.operator.OperatorType;
import com.starrocks.sql.optimizer.operator.logical.LogicalLimitOperator;
import com.starrocks.sql.optimizer.operator.pattern.Pattern;
import com.starrocks.sql.optimizer.rule.RuleType;
import java.util.List;
/**
 * Transformation rule that pushes a LIMIT that sits directly on top of a CTE
 * anchor down into the anchor's right (index 1) child, so the limit applies to
 * the consuming side of the CTE rather than above the anchor.
 */
public class PushDownLimitCTEAnchor extends TransformationRule {
    public PushDownLimitCTEAnchor() {
        super(RuleType.TF_PUSH_DOWN_LIMIT_CTE_ANCHOR, Pattern.create(OperatorType.LOGICAL_LIMIT)
                .addChildren(Pattern.create(OperatorType.LOGICAL_CTE_ANCHOR, OperatorType.PATTERN_MULTI_LEAF)));
    }

    @Override
    public List<OptExpression> transform(OptExpression input, OptimizerContext context) {
        LogicalLimitOperator limitOp = (LogicalLimitOperator) input.getOp();
        OptExpression cteAnchor = input.inputAt(0);
        // Re-create the limit operator below the anchor's right child.
        OptExpression pushedLimit = new OptExpression(
                new LogicalLimitOperator(limitOp.getLimit(), limitOp.getOffset(), limitOp.getPhase()));
        OptExpression rightChild = cteAnchor.getInputs().get(1);
        pushedLimit.getInputs().add(rightChild);
        cteAnchor.getInputs().set(1, pushedLimit);
        return Lists.newArrayList(cteAnchor);
    }
}
|
/*
* Copyright to the original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.eventcollector.service;
import net.jini.config.Configuration;
import net.jini.core.event.RemoteEvent;
import net.jini.discovery.DiscoveryManagement;
import org.rioproject.event.EventDescriptor;
import java.io.File;
import java.util.concurrent.BlockingQueue;
/**
 * Immutable holder for the collaborators and settings an event collector needs:
 * configuration, the incoming event queue, event descriptors, discovery
 * management and the root directory used for persistence.
 *
 * @author Dennis Reedy
 */
public class EventCollectorContext {
    private final Configuration configuration;
    private final BlockingQueue<RemoteEvent> events;
    private final EventDescriptor[] descriptors;
    private final DiscoveryManagement discovery;
    private final File persistentRoot;

    public EventCollectorContext(Configuration config,
                                 BlockingQueue<RemoteEvent> eventQueue,
                                 EventDescriptor[] eventDescriptors,
                                 DiscoveryManagement discoveryManager,
                                 File persistentDirectoryRoot) {
        this.configuration = config;
        this.events = eventQueue;
        this.descriptors = eventDescriptors;
        this.discovery = discoveryManager;
        this.persistentRoot = persistentDirectoryRoot;
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    public BlockingQueue<RemoteEvent> getEventQueue() {
        return events;
    }

    public EventDescriptor[] getEventDescriptors() {
        return descriptors;
    }

    public DiscoveryManagement getDiscoveryManager() {
        return discovery;
    }

    public File getPersistentDirectoryRoot() {
        return persistentRoot;
    }
}
|
/**
* Scaffolding file used to store all the setups needed to run
* tests automatically generated by EvoSuite
* Mon Nov 30 00:06:20 GMT 2020
*/
package com.lts.io;
import org.evosuite.runtime.annotation.EvoSuiteClassExclude;
import org.junit.BeforeClass;
import org.junit.Before;
import org.junit.After;
import org.junit.AfterClass;
import org.evosuite.runtime.sandbox.Sandbox;
import org.evosuite.runtime.sandbox.Sandbox.SandboxMode;
// Generated EvoSuite scaffolding for the ArchiveScanner test suite: it saves
// and restores JVM-global state (system properties, security manager, threads,
// static class state) around each generated test so runs are isolated and
// repeatable. Auto-generated — do not edit by hand.
@EvoSuiteClassExclude
public class ArchiveScanner_ESTest_scaffolding {

  // Applies EvoSuite's non-functional requirement checks to each test method.
  @org.junit.Rule
  public org.evosuite.runtime.vnet.NonFunctionalRequirementRule nfr = new org.evosuite.runtime.vnet.NonFunctionalRequirementRule();

  // Snapshot of the system properties taken at class-load time; restored after
  // the suite and before each test.
  private static final java.util.Properties defaultProperties = (java.util.Properties) java.lang.System.getProperties().clone();

  // Stops client threads left running by a test (3000 presumably a millisecond
  // grace period — see ThreadStopper docs to confirm).
  private org.evosuite.runtime.thread.ThreadStopper threadStopper = new org.evosuite.runtime.thread.ThreadStopper (org.evosuite.runtime.thread.KillSwitchHandler.getInstance(), 3000);

  // One-time EvoSuite runtime setup: class under test, GUI/headless support,
  // execution limits, sandbox and class initialization.
  @BeforeClass
  public static void initEvoSuiteFramework() {
    org.evosuite.runtime.RuntimeSettings.className = "com.lts.io.ArchiveScanner";
    org.evosuite.runtime.GuiSupport.initialize();
    org.evosuite.runtime.RuntimeSettings.maxNumberOfThreads = 100;
    org.evosuite.runtime.RuntimeSettings.maxNumberOfIterationsPerLoop = 10000;
    org.evosuite.runtime.RuntimeSettings.mockSystemIn = true;
    org.evosuite.runtime.RuntimeSettings.sandboxMode = org.evosuite.runtime.sandbox.Sandbox.SandboxMode.RECOMMENDED;
    org.evosuite.runtime.sandbox.Sandbox.initializeSecurityManagerForSUT();
    org.evosuite.runtime.classhandling.JDKClassResetter.init();
    setSystemProperties();
    initializeClasses();
    org.evosuite.runtime.Runtime.getInstance().resetRuntime();
  }

  // Restores the default security manager and the original system properties.
  @AfterClass
  public static void clearEvoSuiteFramework(){
    Sandbox.resetDefaultSecurityManager();
    java.lang.System.setProperties((java.util.Properties) defaultProperties.clone());
  }

  // Per-test setup: record live threads, reset runtime state, activate the
  // instrumenting agent before SUT code executes.
  @Before
  public void initTestCase(){
    threadStopper.storeCurrentThreads();
    threadStopper.startRecordingTime();
    org.evosuite.runtime.jvm.ShutdownHookHandler.getInstance().initHandler();
    org.evosuite.runtime.sandbox.Sandbox.goingToExecuteSUTCode();
    setSystemProperties();
    org.evosuite.runtime.GuiSupport.setHeadless();
    org.evosuite.runtime.Runtime.getInstance().resetRuntime();
    org.evosuite.runtime.agent.InstrumentingAgent.activate();
  }

  // Per-test teardown: stop leftover threads, run registered shutdown hooks,
  // reset instrumented class state and deactivate the agent.
  @After
  public void doneWithTestCase(){
    threadStopper.killAndJoinClientThreads();
    org.evosuite.runtime.jvm.ShutdownHookHandler.getInstance().safeExecuteAddedHooks();
    org.evosuite.runtime.classhandling.JDKClassResetter.reset();
    resetClasses();
    org.evosuite.runtime.sandbox.Sandbox.doneWithExecutingSUTCode();
    org.evosuite.runtime.agent.InstrumentingAgent.deactivate();
    org.evosuite.runtime.GuiSupport.restoreHeadlessMode();
  }

  // Pins the exact system properties the tests were generated against.
  public static void setSystemProperties() {
    java.lang.System.setProperties((java.util.Properties) defaultProperties.clone());
    java.lang.System.setProperty("file.encoding", "UTF-8");
    java.lang.System.setProperty("java.awt.headless", "true");
    java.lang.System.setProperty("java.io.tmpdir", "/tmp");
    java.lang.System.setProperty("user.dir", "/home/ubuntu/termite/projects/78_caloriecount");
    java.lang.System.setProperty("user.home", "/home/ubuntu");
    java.lang.System.setProperty("user.language", "en");
    java.lang.System.setProperty("user.name", "ubuntu");
    java.lang.System.setProperty("user.timezone", "Etc/UTC");
  }

  // Initializes the listed classes with the scaffolding's class loader so the
  // SUT's class graph is loaded before tests run.
  private static void initializeClasses() {
    org.evosuite.runtime.classhandling.ClassStateSupport.initializeClasses(ArchiveScanner_ESTest_scaffolding.class.getClassLoader() ,
      "com.lts.LTSException",
      "com.lts.io.archive.AbstractTempDirectoryArchive",
      "com.lts.io.DirectoryScanner",
      "com.lts.io.archive.ArchiveTreeNode",
      "com.lts.io.ImprovedFile$Reason",
      "com.lts.util.deepcopy.DeepCopier",
      "com.lts.io.archive.AbstractNestedArchive",
      "com.lts.io.archive.ZipArchive",
      "com.lts.io.ArchiveScanner",
      "com.lts.io.archive.DefaultNestedArchive",
      "com.lts.util.TreeNode",
      "com.lts.io.archive.Archive",
      "com.lts.io.ImprovedFile",
      "com.lts.util.deepcopy.DeepCopyException",
      "com.lts.io.IOUtilities",
      "com.lts.io.ImprovedFile$FileException",
      "com.lts.io.archive.NestedArchive"
    );
  }

  // Resets static state of the listed classes between test cases.
  private static void resetClasses() {
    org.evosuite.runtime.classhandling.ClassResetter.getInstance().setClassLoader(ArchiveScanner_ESTest_scaffolding.class.getClassLoader());
    org.evosuite.runtime.classhandling.ClassStateSupport.resetClasses(
      "com.lts.io.DirectoryScanner",
      "com.lts.io.ArchiveScanner",
      "com.lts.io.ImprovedFile",
      "com.lts.io.archive.AbstractNestedArchive",
      "com.lts.io.archive.DefaultNestedArchive",
      "com.lts.io.ImprovedFile$FileException",
      "com.lts.io.ImprovedFile$Reason",
      "com.lts.LTSException",
      "com.lts.io.archive.AbstractTempDirectoryArchive",
      "com.lts.io.archive.ZipArchive",
      "com.lts.io.IOUtilities",
      "com.lts.util.TreeNode",
      "com.lts.io.archive.ArchiveTreeNode"
    );
  }
}
|
package io.vertx.test.codegen.testdataobject;
import io.vertx.codegen.annotations.DataObject;
import io.vertx.core.json.JsonObject;
/**
 * Codegen test fixture: a {@code @DataObject} interface exposing a single
 * {@link JsonObject} property setter.
 *
 * @author <a href="mailto:julien@julienviet.com">Julien Viet</a>
 */
@DataObject
public interface JsonObjectSetter {
  /**
   * Set the JSON object value.
   *
   * @param s the value to set
   * @return this instance, per the declared return type, allowing fluent chaining
   */
  JsonObjectSetter setJsonObject(JsonObject s);
}
|
package org.jhaws.common.web.wicket.qtip;
import org.jhaws.common.web.wicket.CssResourceReference;
import org.jhaws.common.web.wicket.JavaScriptResourceReference;
import org.jhaws.common.web.wicket.WicketApplication;
// v3.0.3
/**
 * Static holder for the qTip2 (v3.0.3) JavaScript and CSS resource references.
 * The fields are now {@code final} so the shared references cannot be
 * reassigned, and the class cannot be instantiated.
 *
 * @see http://qtip2.com
 */
public class QTip {
    /** qTip2 script; declares a dependency on the application's jQuery reference. */
    public static final JavaScriptResourceReference JS = new JavaScriptResourceReference(QTip.class, "jquery.qtip.js")
            .addJavaScriptResourceReferenceDependency(WicketApplication.get().getJavaScriptLibrarySettings().getJQueryReference());

    /** qTip2 stylesheet. */
    public static final CssResourceReference CSS = new CssResourceReference(QTip.class, "jquery.qtip.css");

    /** Constant holder only — not meant to be instantiated. */
    private QTip() {
    }
}
|
/**
*
*/
package com.allendowney.thinkdast;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Implementation of a Map using a List of entries, so most
 * operations are linear time.
 *
 * Fixes over the original version:
 * - {@code values()} preserves duplicate values (Map contract) instead of
 *   collapsing them into a Set.
 * - {@code entrySet()} is implemented (returns a snapshot) instead of throwing.
 * - {@code Entry.setValue} returns the previous value, per Map.Entry's contract.
 *
 * @author downey
 * @param <K>
 * @param <V>
 */
public class MyLinearMap<K, V> implements Map<K, V> {

    // Backing store: one Entry per distinct key.
    private List<Entry> entries = new ArrayList<Entry>();

    /** A single key-value pair; the value may be replaced in place. */
    public class Entry implements Map.Entry<K, V> {
        private K key;
        private V value;

        public Entry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        @Override
        public K getKey() {
            return key;
        }

        @Override
        public V getValue() {
            return value;
        }

        /** Replaces the value and returns the previous one (Map.Entry contract). */
        @Override
        public V setValue(V newValue) {
            V oldValue = value;
            value = newValue;
            return oldValue;
        }
    }

    @Override
    public void clear() {
        entries.clear();
    }

    @Override
    public boolean containsKey(Object target) {
        return findEntry(target) != null;
    }

    /**
     * Returns the entry that contains the target key, or null if there is none.
     * Linear scan over the entry list; null keys are supported.
     *
     * @param target key to look for (may be null)
     */
    private Entry findEntry(Object target) {
        for (Entry entry : entries) {
            if (equals(target, entry.getKey())) {
                return entry;
            }
        }
        return null;
    }

    /**
     * Compares two keys or two values, handling null correctly.
     *
     * @param target first object, may be null
     * @param obj second object, may be null
     * @return true if both are null or {@code target.equals(obj)}
     */
    private boolean equals(Object target, Object obj) {
        if (target == null) {
            return obj == null;
        }
        return target.equals(obj);
    }

    @Override
    public boolean containsValue(Object target) {
        for (Map.Entry<K, V> entry : entries) {
            if (equals(target, entry.getValue())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns a snapshot of the entries. Unlike the Map contract's live view,
     * structural changes to the returned set do not affect this map, but
     * {@code setValue} on a contained entry does.
     */
    @Override
    public Set<Map.Entry<K, V>> entrySet() {
        return new HashSet<Map.Entry<K, V>>(entries);
    }

    /** Returns the value for the key, or null if absent (or mapped to null). */
    @Override
    public V get(Object key) {
        Entry entry = findEntry(key);
        return entry == null ? null : entry.getValue();
    }

    @Override
    public boolean isEmpty() {
        return entries.isEmpty();
    }

    @Override
    public Set<K> keySet() {
        Set<K> set = new HashSet<K>();
        for (Entry entry : entries) {
            set.add(entry.getKey());
        }
        return set;
    }

    /**
     * Associates the value with the key, returning the previous value for the
     * key, or null if the key was absent.
     */
    @Override
    public V put(K key, V value) {
        Entry entry = findEntry(key);
        if (entry == null) {
            entries.add(new Entry(key, value));
            return null;
        }
        V oldValue = entry.getValue();
        entry.setValue(value);
        return oldValue;
    }

    @Override
    public void putAll(Map<? extends K, ? extends V> map) {
        for (Map.Entry<? extends K, ? extends V> entry : map.entrySet()) {
            put(entry.getKey(), entry.getValue());
        }
    }

    /**
     * Removes the mapping for the key, returning the removed value or null if
     * the key was absent.
     */
    @Override
    public V remove(Object key) {
        Entry entry = findEntry(key);
        if (entry == null) {
            return null;
        }
        V value = entry.getValue();
        entries.remove(entry);
        return value;
    }

    @Override
    public int size() {
        return entries.size();
    }

    /**
     * Returns the values, preserving duplicates as the Map contract requires
     * (the original Set-based implementation silently deduplicated them).
     */
    @Override
    public Collection<V> values() {
        List<V> values = new ArrayList<V>(entries.size());
        for (Entry entry : entries) {
            values.add(entry.getValue());
        }
        return values;
    }

    /**
     * @param args
     */
    public static void main(String[] args) {
        Map<String, Integer> map = new MyLinearMap<String, Integer>();
        map.put("Word1", 1);
        map.put("Word2", 2);
        Integer value = map.get("Word1");
        System.out.println(value);
        for (String key : map.keySet()) {
            System.out.println(key + ", " + map.get(key));
        }
    }

    /**
     * Returns a reference to `entries`.
     *
     * This is not part of the Map interface; it is here to provide the functionality
     * of `entrySet` in a way that is substantially simpler than the "right" way.
     *
     * @return the live backing list of entries
     */
    protected Collection<? extends java.util.Map.Entry<K, V>> getEntries() {
        return entries;
    }
}
|
/*
* #%L
* wcm.io
* %%
* Copyright (C) 2021 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.caravan.rhyme.jaxrs.impl.docs;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.UriInfo;
import org.apache.commons.io.IOUtils;
import org.apache.sling.testing.mock.osgi.junit.OsgiContext;
import org.apache.sling.testing.mock.osgi.junit5.OsgiContextExtension;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import com.google.common.base.Charsets;
@ExtendWith(OsgiContextExtension.class)
@ExtendWith(MockitoExtension.class)
public class RhymeDocsHtmlResourceTest {

    private final OsgiContext context = new OsgiContext();

    @Mock
    private RhymeDocsOsgiBundleSupport docsSupport;

    @Mock
    private UriInfo uriInfo;

    private RhymeDocsHtmlResource resource;

    @BeforeEach
    void setUp() {
        context.registerService(docsSupport);
        // this is not really required for the resource, but we want to at least test this can be activated
        context.registerService(new RhymeDocsJaxRsApplication());
        resource = context.registerInjectActivateService(new RhymeDocsHtmlResource());
    }

    @Test
    public void getHtmlDocumentation_should_return_404_if_no_InputStream_available_for_given_filename() throws Exception {
        Throwable caught = catchThrowable(() -> resource.getHtmlDocumentation("Foo.html"));

        assertThat(caught).isInstanceOf(WebApplicationException.class);
        WebApplicationException webEx = (WebApplicationException) caught;
        assertThat(webEx.getResponse().getStatus()).isEqualTo(404);
    }

    @Test
    public void getHtmlDocumentation_should_return_html_if_InputStream_available_for_given_filename() throws Exception {
        String expectedHtml = "<föö></föö>";
        when(docsSupport.openResourceStream(anyString()))
                .thenReturn(IOUtils.toInputStream(expectedHtml, Charsets.UTF_8));

        String html = resource.getHtmlDocumentation("Foo.html");

        assertThat(html).isEqualTo(expectedHtml);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import org.apache.lucene.document.*;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.index.TermEnum;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.core.SolrConfig;
import org.apache.solr.core.SolrCore;
import org.apache.solr.core.SolrInfoMBean;
import org.apache.solr.core.SolrResourceLoader.PartionKey;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.lucene.util.OpenBitSet;
import org.apache.lucene.util.cache.Cache;
import org.apache.lucene.util.cache.SimpleLRUCache;
import java.io.IOException;
import java.net.URL;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.alimama.mdrill.utils.UniqConfig;
public class SolrIndexSearcher extends IndexSearcher implements SolrInfoMBean {
// Global counters of searcher opens and closes.
public static final AtomicLong numOpens = new AtomicLong();
public static final AtomicLong numCloses = new AtomicLong();
private static Logger log = LoggerFactory.getLogger(SolrIndexSearcher.class);
private final SolrCore core;
private final IndexSchema schema;
// Absolute path of the index directory when backed by an FSDirectory.
private String indexDir;
// Log/stat-friendly searcher name, assigned in the constructor.
private final String name;
private long openTime = System.currentTimeMillis();
private long registerTime = 0;
private long warmupTime = 0;
// Top-level reader this searcher wraps.
private final SolrIndexReader reader;
// Whether close() should release the reader reference.
private final boolean closeReader;
private final int queryResultWindowSize;
private final int queryResultMaxDocsCached;
private final boolean useFilterForSortedQuery;
public final boolean enableLazyFieldLoading;
// Synchronized LRU caches (sized by UniqConfig.fqCacheSize()) for filter
// DocSets and query result DocLists; replaced wholesale by SolrCoreClearCache().
private Cache<Query,DocSet> filterCache=Cache.synchronizedCache(new SimpleLRUCache<Query,DocSet>(UniqConfig.fqCacheSize()));
private Cache<QueryResultKey,DocList> queryResultCache=Cache.synchronizedCache(new SimpleLRUCache<QueryResultKey,DocList>(UniqConfig.fqCacheSize()));
/**
 * Discards both caches by replacing them with fresh, empty synchronized LRU
 * instances sized by {@code UniqConfig.fqCacheSize()}.
 */
public void SolrCoreClearCache() {
    queryResultCache = Cache.synchronizedCache(
            new SimpleLRUCache<QueryResultKey, DocList>(UniqConfig.fqCacheSize()));
    filterCache = Cache.synchronizedCache(
            new SimpleLRUCache<Query, DocSet>(UniqConfig.fqCacheSize()));
}
/** Caches the DocSet computed for the given query. */
public void filterCachePut(Query query, DocSet docset) {
    filterCache.put(query, docset);
}
/**
 * Looks up the cached DocSet for the given query.
 *
 * @return the cached set, or null on a cache miss
 */
public DocSet filterCacheGet(Query q) {
    return filterCache.get(q);
}
/**
 * Releases this searcher's resources: drops both caches, releases the reader
 * reference when this searcher owns it, and bumps the global close counter.
 */
@Override
public void close() throws IOException {
    this.SolrCoreClearCache();
    // Only decrement the reader's refcount when this searcher opened/owns it.
    if (closeReader) reader.decRef();
    numCloses.incrementAndGet();
}
// Partition identity associated with this searcher ("partion" spelling is
// historical and preserved for API compatibility).
private String partionKey = "default";
private PartionKey partionCacheKey;

/** Returns the partition cache key; may be null until set. */
public PartionKey getPartionCacheKey() {
    return partionCacheKey;
}

public void setPartionCacheKey(PartionKey partionCacheKey) {
    this.partionCacheKey = partionCacheKey;
}

/** Returns the partition key; defaults to {@code "default"}. */
public String getPartionKey() {
    return partionKey;
}

public void setPartionKey(String partionKey) {
    this.partionKey = partionKey;
}
// All field names present in the index, captured from the reader at construction.
private final Collection<String> fieldNames;
// Stored field names usable for highlighting; not initialized in this chunk —
// presumably computed lazily elsewhere (TODO confirm).
private Collection<String> storedHighlightFieldNames;

/** Creates a searcher searching the index in the provided directory. */
public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, Directory directory, boolean readOnly) throws IOException {
    // Delegates to the reader-based constructor; the new reader is owned (closeReader=true).
    this(core, schema, name, core.getIndexReaderFactory().newReader(directory, readOnly), true);
}
/**
 * Ensures the given reader is a SolrIndexReader, wrapping it (with empty
 * associated info) when it is not already one.
 */
private static SolrIndexReader wrap(IndexReader r) {
    if (r instanceof SolrIndexReader) {
        return (SolrIndexReader) r;
    }
    SolrIndexReader wrapped = new SolrIndexReader(r, null, 0);
    wrapped.associateInfo(null);
    return wrapped;
}
// Directory holding field-cache data; read and written only via the accessors below.
Directory fieldcacheDir = null;

public Directory getFieldcacheDir() {
    return fieldcacheDir;
}

public void setFieldcacheDir(Directory fieldcacheDir) {
    this.fieldcacheDir = fieldcacheDir;
}
/**
 * Creates a searcher over an already-open reader.
 *
 * @param closeReader when true, this searcher takes ownership of the reader
 *                    and releases its reference in {@link #close()}
 */
public SolrIndexSearcher(SolrCore core, IndexSchema schema, String name, IndexReader r, boolean closeReader) {
    super(wrap(r));
    this.reader = (SolrIndexReader) super.getIndexReader();
    this.core = core;
    this.schema = schema;
    // Unique, human-readable name used in logs and toString().
    this.name = "Searcher@" + Integer.toHexString(hashCode()) + (name != null ? " " + name : "");
    log.info("Opening " + this.name);
    SolrIndexReader.setSearcher(reader, this);
    // Record the index directory path when backed by the filesystem.
    if (r.directory() instanceof FSDirectory) {
        FSDirectory fsDirectory = (FSDirectory) r.directory();
        indexDir = fsDirectory.getDirectory().getAbsolutePath();
    }
    this.closeReader = closeReader;
    setSimilarity(schema.getSimilarity());
    // Copy query-handling settings from the core's config.
    SolrConfig solrConfig = core.getSolrConfig();
    queryResultWindowSize = solrConfig.queryResultWindowSize;
    queryResultMaxDocsCached = solrConfig.queryResultMaxDocsCached;
    useFilterForSortedQuery = solrConfig.useFilterForSortedQuery;
    enableLazyFieldLoading = solrConfig.enableLazyFieldLoading;
    fieldNames = r.getFieldNames(IndexReader.FieldOption.ALL);
    numOpens.incrementAndGet();
}
@Override
public String toString() {
    return name;
}

public SolrCore getCore() {
    return core;
}

/** Direct access to the top-level SolrIndexReader. */
public SolrIndexReader getReader() { return reader; }

public IndexSchema getSchema() { return schema; }

/** Executes the query command via getDocListC, populating and returning {@code qr}. */
public QueryResult search(QueryResult qr, QueryCommand cmd) throws IOException {
    getDocListC(qr, cmd);
    return qr;
}

/** Absolute index directory path, or null when the index is not file-backed. */
public String getIndexDir() {
    return indexDir;
}
/**
 * FieldSelector that loads the named fields eagerly and marks every other
 * field for lazy loading.
 */
static class SetNonLazyFieldSelector implements FieldSelector {
    private Set<String> fieldsToLoad;

    SetNonLazyFieldSelector(Set<String> toLoad) {
        fieldsToLoad = toLoad;
    }

    public FieldSelectorResult accept(String fieldName) {
        return fieldsToLoad.contains(fieldName)
                ? FieldSelectorResult.LOAD
                : FieldSelectorResult.LAZY_LOAD;
    }
}
/** Retrieves the document, loading all fields. */
@Override
public Document doc(int i) throws IOException {
    return doc(i, (Set<String>) null);
}

/** Retrieves the document, applying the supplied field selector. */
@Override
public Document doc(int n, FieldSelector fieldSelector) throws IOException {
    return getIndexReader().document(n, fieldSelector);
}

/**
 * Retrieves the document; when lazy field loading is enabled and a field set
 * is given, only those fields are loaded eagerly (others load lazily).
 */
public Document doc(int i, Set<String> fields) throws IOException {
    Document d;
    if (!enableLazyFieldLoading || fields == null) {
        d = getIndexReader().document(i);
    } else {
        d = getIndexReader().document(i, new SetNonLazyFieldSelector(fields));
    }
    return d;
}
/** Reads documents from the DocList into the array, loading all fields. */
public void readDocs(Document[] docs, DocList ids) throws IOException {
    readDocs(docs, ids, null);
}

/**
 * Reads documents from the DocList into the array in iteration order,
 * restricted to the given fields; fills exactly {@code docs.length} slots.
 */
public void readDocs(Document[] docs, DocList ids, Set<String> fields) throws IOException {
    DocIterator iter = ids.iterator();
    for (int i = 0; i < docs.length; i++) {
        docs[i] = doc(iter.nextDoc(), fields);
    }
}
/**
 * Weights any SolrSortField entries in the given sort against this searcher;
 * returns the sort unchanged when none need weighting, or null for null input.
 */
public Sort weightSort(Sort sort) throws IOException {
    if (sort == null) {
        return null;
    }
    final SortField[] fields = sort.getSort();
    boolean hasSolrSortField = false;
    for (SortField field : fields) {
        if (field instanceof SolrSortField) {
            hasSolrSortField = true;
            break;
        }
    }
    if (!hasSolrSortField) {
        return sort;
    }
    // Clone before rewriting so the caller's Sort is left untouched.
    final SortField[] weighted = fields.clone();
    for (int idx = 0; idx < weighted.length; idx++) {
        if (weighted[idx] instanceof SolrSortField) {
            weighted[idx] = ((SolrSortField) weighted[idx]).weight(this);
        }
    }
    return new Sort(weighted);
}
/**
 * Returns the internal doc id of the first document containing term {@code t},
 * or -1 when no document matches. The TermDocs enumerator is always closed.
 */
public int getFirstMatch(Term t) throws IOException {
    TermDocs tdocs = null;
    try {
        tdocs = reader.termDocs(t);
        if (!tdocs.next()) return -1;
        return tdocs.doc();
    } finally {
        if (tdocs != null) tdocs.close();
    }
}
/**
 * Returns the set of documents matching {@code query}, consulting the filter
 * cache for the query's positive form and computing the complement when the
 * query is negative.
 */
public DocSet getDocSet(Query query) throws IOException {
    // Queries that opt out of caching are unwrapped and computed directly.
    if (query instanceof ExtendedQuery) {
        ExtendedQuery eq = (ExtendedQuery) query;
        if (!eq.getCache()) {
            if (query instanceof WrappedQuery) {
                query = ((WrappedQuery) query).getWrappedQuery();
            }
            query = QueryUtils.makeQueryable(query);
            return getDocSetNC(query, null);
        }
    }
    // Get the absolute value (positive version) of this query. If we
    // get back the same reference, we know it's positive.
    Query absQ = QueryUtils.getAbs(query);
    boolean positive = query == absQ;
    DocSet absAnswer = this.filterCacheGet(absQ);
    if (absAnswer != null) {
        if (positive) return absAnswer;
        // Negative query: complement the cached positive set against all docs.
        else return getPositiveDocSet(matchAllDocsQuery).andNot(absAnswer);
    }
    absAnswer = getDocSetNC(absQ, null);
    DocSet answer = positive ? absAnswer : getPositiveDocSet(matchAllDocsQuery).andNot(absAnswer);
    // Cache the positive form so both the query and its negation benefit later.
    this.filterCachePut(absQ, absAnswer);
    return answer;
}
/**
 * Cached DocSet lookup for a positive query: returns the cached set when
 * present, otherwise computes it via getDocSetNC and stores it in the cache.
 */
DocSet getPositiveDocSet(Query q) throws IOException {
    DocSet cached = filterCacheGet(q);
    if (cached != null) {
        return cached;
    }
    DocSet computed = getDocSetNC(q, null);
    filterCachePut(q, computed);
    return computed;
}
/**
 * Cached DocSet lookup keyed by the query; on a miss, computes the set from
 * the supplied TermDocs state and stores it in the cache.
 */
public DocSet getPositiveDocSet(Query q, TermDocsState tdState) throws IOException {
    DocSet cached = filterCacheGet(q);
    if (cached != null) {
        return cached;
    }
    DocSet computed = getDocSetNC(q, null, tdState);
    filterCachePut(q, computed);
    return computed;
}
// Shared positive query used to materialize the full document set.
private static Query matchAllDocsQuery = new MatchAllDocsQuery();

/**
 * Outcome of {@link #getProcessedFilter}: a fully computed DocSet answer
 * (when everything was resolvable from caches), a Filter to apply during
 * collection, and/or a chained post-filter collector.
 */
public static class ProcessedFilter {
    public DocSet answer; // the answer, if non-null
    public Filter filter;
    public DelegatingCollector postFilter;
}
// Orders ExtendedQuery filters by ascending cost. Uses explicit comparison
// rather than integer subtraction, which can overflow for extreme cost values.
private static Comparator<Query> sortByCost = new Comparator<Query>() {
    public int compare(Query q1, Query q2) {
        int cost1 = ((ExtendedQuery) q1).getCost();
        int cost2 = ((ExtendedQuery) q2).getCost();
        return cost1 < cost2 ? -1 : (cost1 > cost2 ? 1 : 0);
    }
};
/**
 * Returns the DocSet of documents matching all supplied filter queries,
 * delegating partitioning of the filters to {@link #getProcessedFilter}.
 */
public DocSet getDocSet(List<Query> queries) throws IOException {
    ProcessedFilter pf = getProcessedFilter(null, queries);
    // Everything resolvable from caches — no collection pass needed.
    if (pf.answer != null) return pf.answer;
    DocSetCollector setCollector = new DocSetCollector(maxDoc() >> 6, maxDoc());
    Collector collector = setCollector;
    if (pf.postFilter != null) {
        pf.postFilter.setLastDelegate(collector);
        collector = pf.postFilter;
    }
    // Walk each leaf (segment) reader, collecting doc ids that pass the filter.
    final SolrIndexReader[] leaves = reader.getLeafReaders();
    final int offsets[] = reader.getLeafOffsets();
    for (int i = 0; i < leaves.length; i++) {
        SolrIndexReader subReader = leaves[i];
        int baseDoc = offsets[i];
        DocIdSet idSet = null;
        if (pf.filter != null) {
            idSet = pf.filter.getDocIdSet(subReader);
            if (idSet == null) continue; // filter matches nothing in this segment
        }
        DocIdSetIterator idIter = null;
        if (idSet != null) {
            idIter = idSet.iterator();
            if (idIter == null) continue;
        }
        collector.setNextReader(subReader, baseDoc);
        int max = subReader.maxDoc();
        if (idIter == null) {
            // No filter: enumerate all non-deleted docs via a null-term TermDocs.
            TermDocs tdocs = subReader.termDocs(null);
            while (tdocs.next()) {
                int docid = tdocs.doc();
                collector.collect(docid);
            }
        } else {
            // Filtered: advance the iterator, skipping deleted docs.
            for (int docid = -1; (docid = idIter.advance(docid + 1)) < max; ) {
                if (subReader.isDeleted(docid)) continue;
                collector.collect(docid);
            }
        }
    }
    return setCollector.getDocSet();
}
/**
 * Partitions the supplied filter queries into cached DocSets, uncached
 * cost-ordered filters, and post-filters, combining the cached sets into a
 * single answer where possible.
 *
 * @param setFilter pre-computed filter DocSet, may be null
 * @param queries   filter queries, may be null or empty
 */
public ProcessedFilter getProcessedFilter(DocSet setFilter, List<Query> queries) throws IOException {
    ProcessedFilter pf = new ProcessedFilter();
    if (queries == null || queries.size() == 0) {
        if (setFilter != null)
            pf.filter = setFilter.getTopFilter();
        return pf;
    }
    DocSet answer = null;
    boolean[] neg = new boolean[queries.size() + 1];
    DocSet[] sets = new DocSet[queries.size() + 1];
    List<Query> notCached = null;
    List<Query> postFilters = null;
    int end = 0;
    int smallestIndex = -1;
    if (setFilter != null) {
        answer = sets[end++] = setFilter;
        smallestIndex = end;
    }
    int smallestCount = Integer.MAX_VALUE;
    for (Query q : queries) {
        // Non-cached queries go either to post-filters (cost >= 100 and
        // PostFilter-capable) or to the filters evaluated during collection.
        if (q instanceof ExtendedQuery) {
            ExtendedQuery eq = (ExtendedQuery) q;
            if (!eq.getCache()) {
                if (eq.getCost() >= 100 && eq instanceof PostFilter) {
                    if (postFilters == null) postFilters = new ArrayList<Query>(sets.length - end);
                    postFilters.add(q);
                } else {
                    if (notCached == null) notCached = new ArrayList<Query>(sets.length - end);
                    notCached.add(q);
                }
                continue;
            }
        }
        Query posQuery = QueryUtils.getAbs(q);
        sets[end] = getPositiveDocSet(posQuery);
        // Negative query if absolute value different from original
        if (q == posQuery) {
            neg[end] = false;
            // keep track of the smallest positive set.
            // This optimization is only worth it if size() is cached, which it would
            // be if we don't do any set operations.
            int sz = sets[end].size();
            if (sz < smallestCount) {
                smallestCount = sz;
                smallestIndex = end;
                answer = sets[end];
            }
        } else {
            neg[end] = true;
        }
        end++;
    }
    // Are all of our normal cached filters negative?
    if (end > 0 && answer == null) {
        answer = getPositiveDocSet(matchAllDocsQuery);
    }
    // do negative queries first to shrink set size
    for (int i = 0; i < end; i++) {
        if (neg[i]) answer = answer.andNot(sets[i]);
    }
    // Intersect the remaining positive sets, skipping the one already used as the base.
    for (int i = 0; i < end; i++) {
        if (!neg[i] && i != smallestIndex) answer = answer.intersection(sets[i]);
    }
    if (notCached != null) {
        // Cheapest filters first so more expensive ones see fewer candidates.
        Collections.sort(notCached, sortByCost);
        List<Weight> weights = new ArrayList<Weight>(notCached.size());
        for (Query q : notCached) {
            Query qq = QueryUtils.makeQueryable(q);
            weights.add(createNormalizedWeight(qq));
        }
        pf.filter = new FilterImpl(answer, weights);
    } else {
        if (postFilters == null) {
            if (answer == null) {
                answer = getPositiveDocSet(matchAllDocsQuery);
            }
            // "answer" is the only part of the filter, so set it.
            pf.answer = answer;
        }
        if (answer != null) {
            pf.filter = answer.getTopFilter();
        }
    }
    if (postFilters != null) {
        // Build the delegate chain from highest to lowest cost so the cheapest
        // post-filter ends up outermost and runs first.
        Collections.sort(postFilters, sortByCost);
        for (int i = postFilters.size() - 1; i >= 0; i--) {
            DelegatingCollector prev = pf.postFilter;
            pf.postFilter = ((PostFilter) postFilters.get(i)).getFilterCollector(this);
            if (prev != null) pf.postFilter.setDelegate(prev);
        }
    }
    return pf;
}
// query must be positive
/**
 * Computes (without caching) the DocSet for a positive query. TermQueries are
 * enumerated segment-by-segment directly through TermDocs; other queries go
 * through the normal search path. The TermDocs enumerator is now closed in a
 * finally block so it is not leaked when read/collect throws.
 */
protected DocSet getDocSetNC(Query query, DocSet filter) throws IOException {
    DocSetCollector collector = new DocSetCollector(maxDoc() >> 6, maxDoc());
    if (filter == null) {
        if (query instanceof TermQuery) {
            Term t = ((TermQuery) query).getTerm();
            SolrIndexReader[] readers = reader.getLeafReaders();
            int[] offsets = reader.getLeafOffsets();
            int[] arr = new int[256];
            int[] freq = new int[256];
            for (int i = 0; i < readers.length; i++) {
                SolrIndexReader sir = readers[i];
                int offset = offsets[i];
                collector.setNextReader(sir, offset);
                TermDocs tdocs = sir.termDocs(t);
                try {
                    // Read doc ids in bulk until the enumerator is exhausted.
                    for (;;) {
                        int num = tdocs.read(arr, freq);
                        if (num == 0) break;
                        for (int j = 0; j < num; j++) {
                            collector.collect(arr[j]);
                        }
                    }
                } finally {
                    // Close even on exception so the segment's TermDocs is not leaked.
                    tdocs.close();
                }
            }
        } else {
            super.search(query, null, collector);
        }
        return collector.getDocSet();
    } else {
        Filter luceneFilter = filter.getTopFilter();
        super.search(query, luceneFilter, collector);
        return collector.getDocSet();
    }
}
/** @lucene.internal */
// Mutable holder that lets callers reuse a positioned TermEnum and its
// TermDocs across calls (tdocs is created lazily by the consumer).
public static class TermDocsState {
    public TermEnum tenum;
    public TermDocs tdocs;
}
// query must be positive
/**
 * Builds a DocSet for the term currently positioned in {@code tdState.tenum},
 * accumulating ids into a small sorted int array and spilling to an
 * OpenBitSet when the result outgrows it.
 * NOTE: the {@code query} and {@code filter} parameters are not used by this
 * implementation.
 */
protected DocSet getDocSetNC(Query query, DocSet filter, TermDocsState tdState) throws IOException {
    int smallSetSize = maxDoc() >> 6;
    int largestPossible = tdState.tenum.docFreq();
    int[] docs = new int[Math.min(smallSetSize, largestPossible)];
    OpenBitSet obs = null;
    int upto = 0;
    int numBits = 0;
    // Lazily create and (re)position the TermDocs on the current term.
    if (tdState.tdocs == null) {
        tdState.tdocs = reader.termDocs();
    }
    tdState.tdocs.seek(tdState.tenum);
    int[] arr = new int[Math.min(largestPossible, 256)];
    int[] freq = new int[arr.length];
    for (;;) {
        int num = tdState.tdocs.read(arr, freq);
        if (num == 0) break;
        if (upto + num > docs.length) {
            // Overflowed the small array: switch to a bit set for this batch.
            if (obs == null) obs = new OpenBitSet(maxDoc());
            for (int i = 0; i < num; i++) {
                obs.fastSet(arr[i]);
            }
            numBits += num;
        } else {
            System.arraycopy(arr, 0, docs, upto, num);
            upto += num;
        }
    }
    if (obs != null) {
        // Merge the ids accumulated in the small array into the bit set.
        for (int i = 0; i < upto; i++) {
            obs.fastSet(docs[i]);
        }
        numBits += upto;
        return new BitDocSet(obs, numBits);
    }
    return new SortedIntDocSet(docs, upto);
}
/**
 * Returns the set of document ids matching both the query and the filter.
 * This method is cache-aware and attempts to retrieve the answer from the cache if possible.
 * If the answer was not cached, it may have been inserted into the cache as a result of this call.
 * <p>
 *
 * @param query
 * @param filter may be null
 * @return DocSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 */
public DocSet getDocSet(Query query, DocSet filter) throws IOException {
    if (filter == null) return getDocSet(query);
    // Non-cached extended queries are unwrapped and computed directly against the filter.
    if (query instanceof ExtendedQuery) {
        ExtendedQuery eq = (ExtendedQuery) query;
        if (!eq.getCache()) {
            if (query instanceof WrappedQuery) {
                query = ((WrappedQuery) query).getWrappedQuery();
            }
            query = QueryUtils.makeQueryable(query);
            return getDocSetNC(query, filter);
        }
    }
    // Negative query if absolute value different from original
    Query absQ = QueryUtils.getAbs(query);
    boolean positive = absQ == query;
    DocSet first;
    // NOTE(review): filterCache is always non-null as initialized at the field
    // declaration; the no-cache branch below appears unreachable but is kept.
    if (filterCache != null) {
        first = this.filterCacheGet(absQ);
        if (first == null) {
            first = getDocSetNC(absQ, null);
            this.filterCachePut(absQ, first);
        }
        return positive ? first.intersection(filter) : filter.andNot(first);
    }
    // If there isn't a cache, then do a single filtered query if positive.
    return positive ? getDocSetNC(absQ, filter) : filter.andNot(getPositiveDocSet(absQ));
}
/**
 * Converts a Lucene {@link Filter} into a Solr DocSet by materializing every
 * matching doc id into a bit set.
 * This method is not cache-aware and no caches are checked.
 */
public DocSet convertFilter(Filter lfilter) throws IOException {
  DocIdSet matches = lfilter.getDocIdSet(this.reader);
  DocIdSetIterator iter = matches.iterator();
  OpenBitSet bits = new OpenBitSet();
  for (int docId = iter.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iter.nextDoc()) {
    bits.fastSet(docId);
  }
  return new BitDocSet(bits);
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>.
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @return DocList meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocList getDocList(Query query, Query filter, Sort lsort, int offset, int len) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocList();
}
/**
 * Returns documents matching both <code>query</code> and the
 * intersection of the <code>filterList</code>, sorted by <code>sort</code>.
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 *
 * @param query      the main query
 * @param filterList may be null
 * @param lsort      criteria by which to sort (if null, query relevance is used)
 * @param offset     offset into the list of documents to return
 * @param len        maximum number of documents to return
 * @param flags      user supplied flags for the result set
 * @return DocList meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocList getDocList(Query query, List<Query> filterList, Sort lsort, int offset, int len, int flags) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filterList);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setFlags(flags);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocList();
}
// Internal flag: do not consult the queryResultCache for this request.
private static final int NO_CHECK_QCACHE = 0x80000000;
// Request that the complete DocSet of matches be computed alongside the DocList.
public static final int GET_DOCSET = 0x40000000;
// Internal flag: do not consult the filterCache for this request.
private static final int NO_CHECK_FILTERCACHE = 0x20000000;
// Internal flag: do not insert the result into the queryResultCache.
private static final int NO_SET_QCACHE = 0x10000000;
public static final int GET_DOCLIST = 0x02;
// Request that scores be computed and returned with the DocList.
public static final int GET_SCORES = 0x01;
/**
 * getDocList version that uses+populates query and filter caches.
 * In the event of a timeout, the cache is not populated.
 * Fills in {@code qr}'s DocListAndSet; may serve the answer entirely from the
 * queryResultCache, or compute it (possibly via the filterCache sort
 * optimization) and insert a superset window into the cache.
 */
private void getDocListC(QueryResult qr, QueryCommand cmd) throws IOException {
  // old parameters: DocListAndSet out, Query query, List<Query> filterList, DocSet filter, Sort lsort, int offset, int len, int flags, long timeAllowed, NamedList<Object> responseHeader
  DocListAndSet out = new DocListAndSet();
  qr.setDocListAndSet(out);
  QueryResultKey key=null;
  // number of docs the (cached) superset must cover: offset + len
  int maxDocRequested = cmd.getOffset() + cmd.getLen();
  // check for overflow, and check for # docs in index
  if (maxDocRequested < 0 || maxDocRequested > maxDoc()) maxDocRequested = maxDoc();
  int supersetMaxDoc= maxDocRequested;
  DocList superset = null;
  int flags = cmd.getFlags();
  Query q = cmd.getQuery();
  if (q instanceof ExtendedQuery) {
    ExtendedQuery eq = (ExtendedQuery)q;
    if (!eq.getCache()) {
      // query opted out of caching: skip both lookup and insertion
      flags |= (NO_CHECK_QCACHE | NO_SET_QCACHE | NO_CHECK_FILTERCACHE);
    }
  }
  // we can try and look up the complete query in the cache.
  // we can't do that if filter!=null though (we don't want to
  // do hashCode() and equals() for a big DocSet).
  if (queryResultCache != null && cmd.getFilter()==null
      && (flags & (NO_CHECK_QCACHE|NO_SET_QCACHE)) != ((NO_CHECK_QCACHE|NO_SET_QCACHE)))
  {
    // all of the current flags can be reused during warming,
    // so set all of them on the cache key.
    key = new QueryResultKey(q, cmd.getFilterList(), cmd.getSort(), flags);
    if ((flags & NO_CHECK_QCACHE)==0) {
      superset = queryResultCache.get(key);
      if (superset != null) {
        // check that the cache entry has scores recorded if we need them
        if ((flags & GET_SCORES)==0 || superset.hasScores()) {
          // NOTE: subset() returns null if the DocList has fewer docs than
          // requested
          out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
        }
      }
      if (out.docList != null) {
        // found the docList in the cache... now check if we need the docset too.
        // OPT: possible future optimization - if the doclist contains all the matches,
        // use it to make the docset instead of rerunning the query.
        if (out.docSet==null && ((flags & GET_DOCSET)!=0) ) {
          if (cmd.getFilterList()==null) {
            out.docSet = getDocSet(cmd.getQuery());
          } else {
            // fold the main query into the filter list so a single DocSet is built
            List<Query> newList = new ArrayList<Query>(cmd.getFilterList().size()+1);
            newList.add(cmd.getQuery());
            newList.addAll(cmd.getFilterList());
            out.docSet = getDocSet(newList);
          }
        }
        return;
      }
    }
    // If we are going to generate the result, bump up to the
    // next resultWindowSize for better caching.
    if ((flags & NO_SET_QCACHE) == 0) {
      // handle 0 special case as well as avoid idiv in the common case.
      if (maxDocRequested < queryResultWindowSize) {
        supersetMaxDoc=queryResultWindowSize;
      } else {
        supersetMaxDoc = ((maxDocRequested -1)/queryResultWindowSize + 1)*queryResultWindowSize;
        if (supersetMaxDoc < 0) supersetMaxDoc=maxDocRequested;
      }
    } else {
      key = null; // we won't be caching the result
    }
  }
  // OK, so now we need to generate an answer.
  // One way to do that would be to check if we have an unordered list
  // of results for the base query. If so, we can apply the filters and then
  // sort by the resulting set. This can only be used if:
  // - the sort doesn't contain score
  // - we don't want score returned.
  // check if we should try and use the filter cache
  boolean useFilterCache=false;
  if ((flags & (GET_SCORES|NO_CHECK_FILTERCACHE))==0 && useFilterForSortedQuery && cmd.getSort() != null && filterCache != null) {
    useFilterCache=true;
    SortField[] sfields = cmd.getSort().getSort();
    for (SortField sf : sfields) {
      if (sf.getType() == SortField.SCORE) {
        useFilterCache=false;
        break;
      }
    }
  }
  // disable useFilterCache optimization temporarily
  if (useFilterCache) {
    // now actually use the filter cache.
    // for large filters that match few documents, this may be
    // slower than simply re-executing the query.
    if (out.docSet == null) {
      out.docSet = getDocSet(cmd.getQuery(),cmd.getFilter());
      DocSet bigFilt = getDocSet(cmd.getFilterList());
      if (bigFilt != null) out.docSet = out.docSet.intersection(bigFilt);
    }
    // todo: there could be a sortDocSet that could take a list of
    // the filters instead of anding them first...
    // perhaps there should be a multi-docset-iterator
    superset = sortDocSet(out.docSet,cmd.getSort(),supersetMaxDoc);
    out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
  } else {
    // do it the normal way...
    cmd.setSupersetMaxDoc(supersetMaxDoc);
    if ((flags & GET_DOCSET)!=0) {
      // this currently conflates returning the docset for the base query vs
      // the base query and all filters.
      DocSet qDocSet = getDocListAndSetNC(qr,cmd);
      // cache the docSet matching the query w/o filtering
      if (qDocSet!=null && filterCache!=null && !qr.isPartialResults()) this.filterCachePut(cmd.getQuery(),qDocSet);
    } else {
      getDocListNC(qr,cmd);
      //Parameters: cmd.getQuery(),theFilt,cmd.getSort(),0,supersetMaxDoc,cmd.getFlags(),cmd.getTimeAllowed(),responseHeader);
    }
    superset = out.docList;
    out.docList = superset.subset(cmd.getOffset(),cmd.getLen());
  }
  // lastly, put the superset in the cache if the size is less than or equal
  // to queryResultMaxDocsCached
  if (key != null && superset.size() <= queryResultMaxDocsCached && !qr.isPartialResults()) {
    queryResultCache.put(key, superset);
  }
}
/**
 * Non-caching DocList computation: runs the query with the processed filter
 * and fills {@code qr} with a DocSlice (optionally with scores and the
 * sum/max/min statistics gathered by StatisticFieldCollector).
 * Respects {@code timeAllowed}; on timeout, marks the result partial.
 */
private void getDocListNC(QueryResult qr,QueryCommand cmd) throws IOException {
  final long timeAllowed = cmd.getTimeAllowed();
  int len = cmd.getSupersetMaxDoc();
  int last = len;
  if (last < 0 || last > maxDoc()) last=maxDoc();
  final int lastDocRequested = last;
  int nDocsReturned;
  int totalHits;
  float maxScore;
  int[] ids;
  float[] scores;
  double sum=0.0f;
  double max=Double.NEGATIVE_INFINITY;
  double min=Double.POSITIVE_INFINITY;
  boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
  Query query = QueryUtils.makeQueryable(cmd.getQuery());
  ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
  final Filter luceneFilter = pf.filter;
  // handle zero case...
  if (lastDocRequested<=0) {
    // no docs requested: just count hits (and track the top score if needed)
    final float[] topscore = new float[] { Float.NEGATIVE_INFINITY };
    final int[] numHits = new int[1];
    Collector collector;
    if (!needScores) {
      collector = new Collector () {
        @Override
        public void setScorer(Scorer scorer) throws IOException {
        }
        @Override
        public void collect(int doc) throws IOException {
          numHits[0]++;
        }
        @Override
        public void setNextReader(IndexReader reader, int docBase) throws IOException {
        }
        @Override
        public boolean acceptsDocsOutOfOrder() {
          return true;
        }
      };
    } else {
      collector = new Collector() {
        Scorer scorer;
        @Override
        public void setScorer(Scorer scorer) throws IOException {
          this.scorer = scorer;
        }
        @Override
        public void collect(int doc) throws IOException {
          numHits[0]++;
          float score = scorer.score();
          if (score > topscore[0]) topscore[0]=score;
        }
        @Override
        public void setNextReader(IndexReader reader, int docBase) throws IOException {
        }
        @Override
        public boolean acceptsDocsOutOfOrder() {
          return true;
        }
      };
    }
    if( timeAllowed > 0 ) {
      collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
    }
    if (pf.postFilter != null) {
      // post filters wrap the chain and must see documents last
      pf.postFilter.setLastDelegate(collector);
      collector = pf.postFilter;
    }
    try {
      super.search(query, luceneFilter, collector);
    }
    catch( TimeLimitingCollector.TimeExceededException x ) {
      log.warn( "Query: " + query + "; " + x.getMessage() );
      qr.setPartialResults(true);
    }
    nDocsReturned=0;
    ids = new int[nDocsReturned];
    scores = new float[nDocsReturned];
    totalHits = numHits[0];
    maxScore = totalHits>0 ? topscore[0] : 0.0f;
  } else {
    TopDocsCollector topCollector;
    if (cmd.getSort() == null) {
      topCollector = TopScoreDocCollector.create(len, true);
    } else {
      topCollector = StatisticFieldCollector.create(weightSort(cmd.getSort()), len, false, needScores, needScores, true);
    }
    Collector collector = topCollector;
    if( timeAllowed > 0 ) {
      collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
    }
    if (pf.postFilter != null) {
      pf.postFilter.setLastDelegate(collector);
      collector = pf.postFilter;
    }
    try {
      super.search(query, luceneFilter, collector);
    }
    catch( TimeLimitingCollector.TimeExceededException x ) {
      log.warn( "Query: " + query + "; " + x.getMessage() );
      qr.setPartialResults(true);
    }
    totalHits = topCollector.getTotalHits();
    TopDocs topDocs = topCollector.topDocs(0, len);
    maxScore = totalHits>0 ? topDocs.getMaxScore() : 0.0f;
    nDocsReturned = topDocs.scoreDocs.length;
    // statistics computed by StatisticFieldCollector (0/-inf/+inf defaults otherwise)
    sum = topCollector.getSum();
    max = topCollector.getMax();
    min = topCollector.getMin();
    ids = new int[nDocsReturned];
    scores = (cmd.getFlags()&GET_SCORES)!=0 ? new float[nDocsReturned] : null;
    for (int i=0; i<nDocsReturned; i++) {
      ScoreDoc scoreDoc = topDocs.scoreDocs[i];
      ids[i] = scoreDoc.doc;
      if (scores != null) scores[i] = scoreDoc.score;
    }
  }
  int sliceLen = Math.min(lastDocRequested,nDocsReturned);
  if (sliceLen < 0) sliceLen=0;
  qr.setDocList(new DocSlice(0,sliceLen,ids,scores,totalHits,maxScore,
      sum, max, min));
}
// any DocSet returned is for the query only, without any filtering... that way it may
// be cached if desired.
/**
 * Non-caching variant that computes both the DocList and the full DocSet of
 * matches in a single pass via DocSetCollector/DocSetDelegateCollector.
 *
 * @return the DocSet for the unfiltered base query when no filter or post
 *         filter was applied (suitable for filterCache insertion), else null
 */
private DocSet getDocListAndSetNC(QueryResult qr,QueryCommand cmd) throws IOException {
  int len = cmd.getSupersetMaxDoc();
  int last = len;
  if (last < 0 || last > maxDoc()) last=maxDoc();
  final int lastDocRequested = last;
  int nDocsReturned;
  int totalHits;
  float maxScore;
  int[] ids;
  float[] scores;
  DocSet set;
  double sum = 0.0f;
  double max = Double.NEGATIVE_INFINITY;
  double min = Double.POSITIVE_INFINITY;
  boolean needScores = (cmd.getFlags() & GET_SCORES) != 0;
  int maxDoc = maxDoc();
  // threshold below which the collector keeps a small sorted-int set instead of a bitset
  int smallSetSize = maxDoc>>6;
  ProcessedFilter pf = getProcessedFilter(cmd.getFilter(), cmd.getFilterList());
  final Filter luceneFilter = pf.filter;
  Query query = QueryUtils.makeQueryable(cmd.getQuery());
  final long timeAllowed = cmd.getTimeAllowed();
  // handle zero case...
  if (lastDocRequested<=0) {
    final float[] topscore = new float[] { Float.NEGATIVE_INFINITY };
    Collector collector;
    DocSetCollector setCollector;
    if (!needScores) {
      collector = setCollector = new DocSetCollector(smallSetSize, maxDoc);
    } else {
      // delegate collector tracks the top score while the DocSet is built
      collector = setCollector = new DocSetDelegateCollector(smallSetSize, maxDoc, new Collector() {
        Scorer scorer;
        @Override
        public void setScorer(Scorer scorer) throws IOException {
          this.scorer = scorer;
        }
        @Override
        public void collect(int doc) throws IOException {
          float score = scorer.score();
          if (score > topscore[0]) topscore[0]=score;
        }
        @Override
        public void setNextReader(IndexReader reader, int docBase) throws IOException {
        }
        @Override
        public boolean acceptsDocsOutOfOrder() {
          return false;
        }
      });
    }
    if( timeAllowed > 0 ) {
      collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed);
    }
    if (pf.postFilter != null) {
      pf.postFilter.setLastDelegate(collector);
      collector = pf.postFilter;
    }
    try {
      super.search(query, luceneFilter, collector);
    }
    catch( TimeLimitingCollector.TimeExceededException x ) {
      log.warn( "Query: " + query + "; " + x.getMessage() );
      qr.setPartialResults(true);
    }
    set = setCollector.getDocSet();
    nDocsReturned = 0;
    ids = new int[nDocsReturned];
    scores = new float[nDocsReturned];
    totalHits = set.size();
    maxScore = totalHits>0 ? topscore[0] : 0.0f;
  } else {
    TopDocsCollector topCollector;
    if (cmd.getSort() == null) {
      topCollector = TopScoreDocCollector.create(len, true);
    } else {
      topCollector = StatisticFieldCollector.create(weightSort(cmd.getSort()), len, false, needScores, needScores, true);
    }
    DocSetCollector setCollector = new DocSetDelegateCollector(maxDoc>>6, maxDoc, topCollector);
    Collector collector = setCollector;
    if( timeAllowed > 0 ) {
      collector = new TimeLimitingCollector(collector, TimeLimitingCollector.getGlobalCounter(), timeAllowed );
    }
    if (pf.postFilter != null) {
      pf.postFilter.setLastDelegate(collector);
      collector = pf.postFilter;
    }
    try {
      super.search(query, luceneFilter, collector);
    }
    catch( TimeLimitingCollector.TimeExceededException x ) {
      log.warn( "Query: " + query + "; " + x.getMessage() );
      qr.setPartialResults(true);
    }
    set = setCollector.getDocSet();
    totalHits = topCollector.getTotalHits();
    assert(totalHits == set.size());
    sum = topCollector.getSum();
    max = topCollector.getMax();
    min = topCollector.getMin();
    TopDocs topDocs = topCollector.topDocs(0, len);
    maxScore = totalHits>0 ? topDocs.getMaxScore() : 0.0f;
    nDocsReturned = topDocs.scoreDocs.length;
    ids = new int[nDocsReturned];
    scores = (cmd.getFlags()&GET_SCORES)!=0 ? new float[nDocsReturned] : null;
    for (int i=0; i<nDocsReturned; i++) {
      ScoreDoc scoreDoc = topDocs.scoreDocs[i];
      ids[i] = scoreDoc.doc;
      if (scores != null) scores[i] = scoreDoc.score;
    }
  }
  int sliceLen = Math.min(lastDocRequested,nDocsReturned);
  if (sliceLen < 0) sliceLen=0;
  qr.setDocList(new DocSlice(0,sliceLen,ids,scores,totalHits,maxScore,
      sum, max, min));
  // TODO: if we collect results before the filter, we just need to intersect with
  // that filter to generate the DocSet for qr.setDocSet()
  qr.setDocSet(set);
  // TODO: currently we don't generate the DocSet for the base query,
  // but the QueryDocSet == CompleteDocSet if filter==null.
  return pf.filter==null && pf.postFilter==null ? qr.getDocSet() : null;
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>.
 * FUTURE: The returned DocList may be retrieved from a cache.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @return DocList meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocList getDocList(Query query, DocSet filter, Sort lsort, int offset, int len) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilter(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocList();
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>. Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code> (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 * <p>
 * The DocList and DocSet returned should <b>not</b> be modified.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, Query filter, Sort lsort, int offset, int len) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>. Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code> (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 * <p>
 * The DocList and DocSet returned should <b>not</b> be modified.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @param flags  user supplied flags for the result set
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, Query filter, Sort lsort, int offset, int len, int flags) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setFlags(flags);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Returns documents matching both <code>query</code> and the intersection
 * of <code>filterList</code>, sorted by <code>sort</code>.
 * Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code>
 * (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 * <p>
 * The DocList and DocSet returned should <b>not</b> be modified.
 *
 * @param query      the main query
 * @param filterList may be null
 * @param lsort      criteria by which to sort (if null, query relevance is used)
 * @param offset     offset into the list of documents to return
 * @param len        maximum number of documents to return
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, List<Query> filterList, Sort lsort, int offset, int len) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filterList);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Returns documents matching both <code>query</code> and the intersection
 * of <code>filterList</code>, sorted by <code>sort</code>.
 * Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code>
 * (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * This method is cache aware and may retrieve <code>filter</code> from
 * the cache or make an insertion into the cache as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 * <p>
 * The DocList and DocSet returned should <b>not</b> be modified.
 *
 * @param query      the main query
 * @param filterList may be null
 * @param lsort      criteria by which to sort (if null, query relevance is used)
 * @param offset     offset into the list of documents to return
 * @param len        maximum number of documents to return
 * @param flags      user supplied flags for the result set
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, List<Query> filterList, Sort lsort, int offset, int len, int flags) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilterList(filterList);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setFlags(flags);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>. Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code> (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, DocSet filter, Sort lsort, int offset, int len) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilter(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Returns documents matching both <code>query</code> and <code>filter</code>
 * and sorted by <code>sort</code>. Also returns the complete set of documents
 * matching <code>query</code> and <code>filter</code> (regardless of <code>offset</code> and <code>len</code>).
 * <p>
 * This method is cache aware and may make an insertion into the cache
 * as a result of this call.
 * <p>
 * FUTURE: The returned DocList may be retrieved from a cache.
 * <p>
 * The DocList and DocSet returned should <b>not</b> be modified.
 *
 * @param query  the main query
 * @param filter may be null
 * @param lsort  criteria by which to sort (if null, query relevance is used)
 * @param offset offset into the list of documents to return
 * @param len    maximum number of documents to return
 * @param flags  user supplied flags for the result set
 * @return DocListAndSet meeting the specified criteria, should <b>not</b> be modified by the caller.
 * @throws IOException on index access error
 */
public DocListAndSet getDocListAndSet(Query query, DocSet filter, Sort lsort, int offset, int len, int flags) throws IOException {
  QueryCommand cmd = new QueryCommand();
  cmd.setQuery(query);
  cmd.setFilter(filter);
  cmd.setSort(lsort);
  cmd.setOffset(offset);
  cmd.setLen(len);
  cmd.setFlags(flags);
  cmd.setNeedDocSet(true);
  QueryResult result = new QueryResult();
  search(result, cmd);
  return result.getDocListAndSet();
}
/**
 * Sorts an already-computed DocSet and returns the top {@code nDocs} as a
 * DocList. Walks the set's global doc ids, mapping each to its leaf reader
 * and per-leaf doc id before feeding it to the collector.
 *
 * @param set   the documents to sort
 * @param sort  the sort criteria (weighted before use)
 * @param nDocs maximum number of documents to return
 */
protected DocList sortDocSet(DocSet set, Sort sort, int nDocs) throws IOException {
  // bit of a hack to tell if a set is sorted - do it better in the future.
  boolean inOrder = set instanceof BitDocSet || set instanceof SortedIntDocSet;
  TopDocsCollector topCollector = StatisticFieldCollector.create(weightSort(sort), nDocs, false, false, false, inOrder);
  DocIterator iter = set.iterator();
  int base=0;          // doc-id offset of the current leaf reader
  int end=0;           // first doc id beyond the current leaf reader
  int readerIndex = -1;
  SolrIndexReader r=null;
  while(iter.hasNext()) {
    int doc = iter.nextDoc();
    // advance to the leaf reader containing this (global) doc id
    while (doc>=end) {
      r = reader.getLeafReaders()[++readerIndex];
      base = reader.getLeafOffsets()[readerIndex];
      end = base + r.maxDoc();
      topCollector.setNextReader(r, base);
      // we should never need to set the scorer given the settings for the collector
    }
    topCollector.collect(doc-base);
  }
  TopDocs topDocs = topCollector.topDocs(0, nDocs);
  int nDocsReturned = topDocs.scoreDocs.length;
  int[] ids = new int[nDocsReturned];
  for (int i=0; i<nDocsReturned; i++) {
    ScoreDoc scoreDoc = topDocs.scoreDocs[i];
    ids[i] = scoreDoc.doc;
  }
  return new DocSlice(0,nDocsReturned,ids,null,topDocs.totalHits,0.0f,
      topCollector.getSum(), topCollector.getMax(), topCollector.getMin());
}
/**
 * Returns the number of documents that match both <code>a</code> and <code>b</code>.
 * <p>
 * This method is cache-aware and may check as well as modify the cache.
 *
 * @param a the query (may be a negative query)
 * @param b the pre-computed DocSet to intersect with
 * @return the number of documents in the intersection between <code>a</code> and <code>b</code>.
 * @throws IOException on index access error
 */
public int numDocs(Query a, DocSet b) throws IOException {
  // A query is "negative" when its absolute form differs from the original.
  Query absQ = QueryUtils.getAbs(a);
  DocSet positiveA = getPositiveDocSet(absQ);
  if (a == absQ) {
    return b.intersectionSize(positiveA);
  }
  return b.andNotSize(positiveA);
}
/**
 * Returns the number of documents that match both <code>a</code> and <code>b</code>.
 * <p>
 * This method is cache-aware and may check as well as modify the cache.
 *
 * @param a first query (may be negative)
 * @param b second query (may be negative)
 * @return the number of documents in the intersection between <code>a</code> and <code>b</code>.
 * @throws IOException on index access error
 */
public int numDocs(Query a, Query b) throws IOException {
  Query absA = QueryUtils.getAbs(a);
  Query absB = QueryUtils.getAbs(b);
  // A query is "negative" when its absolute form differs from the original.
  boolean negA = (a != absA);
  boolean negB = (b != absB);
  DocSet setA = getPositiveDocSet(absA);
  DocSet setB = getPositiveDocSet(absB);
  if (!negA && !negB) {
    return setA.intersectionSize(setB);
  }
  if (!negA) {
    // only b is negative
    return setA.andNotSize(setB);
  }
  if (!negB) {
    // only a is negative
    return setB.andNotSize(setA);
  }
  // both negative: we need a temp DocSet since we don't have a counting
  // method that takes three sets.
  DocSet all = getPositiveDocSet(matchAllDocsQuery);
  // -a -b == *:*.andNot(a).andNotSize(b) == *.*.andNotSize(a.union(b))
  // we use the last form since the intermediate DocSet should normally be smaller.
  return all.andNotSize(setA.union(setB));
}
/**
 * Takes a list of docs (the doc ids actually), and returns an array
 * of Documents containing all of the stored fields.
 */
public Document[] readDocs(DocList ids) throws IOException {
  final Document[] result = new Document[ids.size()];
  readDocs(result, ids);
  return result;
}
/**
 * Warm this searcher based on an old one (primarily for auto-cache warming).
 * <p>
 * NOTE: cache auto-warming is currently disabled in this build — the original
 * implementation is kept below, commented out, for reference.
 */
public void warm(SolrIndexSearcher old) throws IOException {
  // // Make sure this is first! filters can help queryResults execute!
  // boolean logme = log.isInfoEnabled();
  // long warmingStartTime = System.currentTimeMillis();
  // // warm the caches in order...
  // for (int i=0; i<cacheList.length; i++) {
  //   if (logme) log.info("autowarming " + this + " from " + old + "\n\t" + old.cacheList[i]);
  //   this.cacheList[i].warm(this, old.cacheList[i]);
  //   if (logme) log.info("autowarming result for " + this + "\n\t" + this.cacheList[i]);
  // }
  // warmupTime = System.currentTimeMillis() - warmingStartTime;
}
/** @return the time (epoch millis) at which this searcher was opened. */
public long getOpenTime() {
  return openTime;
}
/** Explains the score for {@code doc}, first making the query executable. */
@Override
public Explanation explain(Query query, int doc) throws IOException {
  return super.explain(QueryUtils.makeQueryable(query), doc);
}
/////////////////////////////////////////////////////////////////////
// SolrInfoMBean stuff: Statistics and Module Info
/////////////////////////////////////////////////////////////////////
/** SolrInfoMBean: the registered name of this component. */
public String getName() {
  return SolrIndexSearcher.class.getName();
}
/** SolrInfoMBean: the Solr core version string. */
public String getVersion() {
  return SolrCore.version;
}
/** SolrInfoMBean: a short human-readable description. */
public String getDescription() {
  return "index searcher";
}
/** SolrInfoMBean: this component belongs to the CORE category. */
public Category getCategory() {
  return Category.CORE;
}
/** SolrInfoMBean: the SVN revision identifier of this source file. */
public String getSourceId() {
  return "$Id: SolrIndexSearcher.java 1201291 2011-11-12 18:02:03Z simonw $";
}
/** SolrInfoMBean: the SVN URL of this source file. */
public String getSource() {
  return "$URL: https://svn.apache.org/repos/asf/lucene/dev/branches/lucene_solr_3_5/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java $";
}
/** SolrInfoMBean: documentation URLs (none for this component). */
public URL[] getDocs() {
  return null;
}
/**
 * SolrInfoMBean: a snapshot of searcher statistics — searcher name, doc
 * counts, reader details, index version, open/registration times and the
 * cache warm-up duration.
 */
public NamedList getStatistics() {
  NamedList stats = new SimpleOrderedMap();
  stats.add("searcherName", name);
  stats.add("numDocs", reader.numDocs());
  stats.add("maxDoc", reader.maxDoc());
  stats.add("reader", reader.toString());
  stats.add("readerDir", reader.directory());
  stats.add("indexVersion", reader.getVersion());
  stats.add("openedAt", new Date(openTime));
  if (registerTime!=0) {
    stats.add("registeredAt", new Date(registerTime));
  }
  stats.add("warmupTime", warmupTime);
  return stats;
}
/**
 * A query request command to avoid having to change the method signatures
 * if we want to pass additional information to the searcher.
 * <p>
 * Either {@link #setFilter(DocSet)} or a filterList setter may be used,
 * but never both on the same command.
 */
public static class QueryCommand {
  private Query query;                 // the main query to execute
  private List<Query> filterList;      // filters as queries (mutually exclusive with filter)
  private DocSet filter;               // pre-computed filter (mutually exclusive with filterList)
  private Sort sort;                   // null means sort by relevance
  private int offset;                  // index of the first document to return
  private int len;                     // maximum number of documents to return
  private int supersetMaxDoc;          // cache-window size computed by getDocListC
  private int flags;                   // bitwise OR of GET_* / NO_* flags
  private long timeAllowed = -1;       // query time limit in ms; <=0 means unlimited
  public Query getQuery() { return query; }
  public QueryCommand setQuery(Query query) {
    this.query = query;
    return this;
  }
  public List<Query> getFilterList() { return filterList; }
  /**
   * @throws IllegalArgumentException if filter is not null.
   */
  public QueryCommand setFilterList(List<Query> filterList) {
    if( filter != null ) {
      throw new IllegalArgumentException( "Either filter or filterList may be set in the QueryCommand, but not both." );
    }
    this.filterList = filterList;
    return this;
  }
  /**
   * A simple setter to build a filterList from a query.
   * A null argument clears the filter list.
   * @throws IllegalArgumentException if filter is not null.
   */
  public QueryCommand setFilterList(Query f) {
    if( filter != null ) {
      throw new IllegalArgumentException( "Either filter or filterList may be set in the QueryCommand, but not both." );
    }
    filterList = null;
    if (f != null) {
      filterList = new ArrayList<Query>(2);
      filterList.add(f);
    }
    return this;
  }
  public DocSet getFilter() { return filter; }
  /**
   * @throws IllegalArgumentException if filterList is not null.
   */
  public QueryCommand setFilter(DocSet filter) {
    if( filterList != null ) {
      throw new IllegalArgumentException( "Either filter or filterList may be set in the QueryCommand, but not both." );
    }
    this.filter = filter;
    return this;
  }
  public Sort getSort() { return sort; }
  public QueryCommand setSort(Sort sort) {
    this.sort = sort;
    return this;
  }
  public int getOffset() { return offset; }
  public QueryCommand setOffset(int offset) {
    this.offset = offset;
    return this;
  }
  public int getLen() { return len; }
  public QueryCommand setLen(int len) {
    this.len = len;
    return this;
  }
  public int getSupersetMaxDoc() { return supersetMaxDoc; }
  public QueryCommand setSupersetMaxDoc(int supersetMaxDoc) {
    this.supersetMaxDoc = supersetMaxDoc;
    return this;
  }
  public int getFlags() {
    return flags;
  }
  /** Replaces all flags with the given value (unlike {@link #setFlags(int)}). */
  public QueryCommand replaceFlags(int flags) {
    this.flags = flags;
    return this;
  }
  /** NOTE: ORs the given flags into the existing ones; it does not replace them. */
  public QueryCommand setFlags(int flags) {
    this.flags |= flags;
    return this;
  }
  /** Clears the given flag bits, leaving the others intact. */
  public QueryCommand clearFlags(int flags) {
    this.flags &= ~flags;
    return this;
  }
  public long getTimeAllowed() { return timeAllowed; }
  public QueryCommand setTimeAllowed(long timeAllowed) {
    this.timeAllowed = timeAllowed;
    return this;
  }
  /** @return true when the GET_DOCSET flag is set. */
  public boolean isNeedDocSet() { return (flags & GET_DOCSET) != 0; }
  public QueryCommand setNeedDocSet(boolean needDocSet) {
    return needDocSet ? setFlags(GET_DOCSET) : clearFlags(GET_DOCSET);
  }
}
/**
 * The result of a search: the matching documents (as a DocList and/or DocSet)
 * plus a partial-results flag for time-limited searches.
 */
public static class QueryResult {
    private boolean partialResults;
    private DocListAndSet docListAndSet;
    // TODO: refactor — should be a typed field with proper accessors.
    public Object groupedResults;

    /** Lazily creates the holder so the doc-list/doc-set setters may be called in any order. */
    private DocListAndSet ensureDocListAndSet() {
        if (docListAndSet == null) {
            docListAndSet = new DocListAndSet();
        }
        return docListAndSet;
    }

    // NOTE(review): the getters dereference docListAndSet directly and will NPE
    // if called before any setter — same contract as the original code.
    public DocList getDocList() { return docListAndSet.docList; }

    public void setDocList(DocList list) {
        ensureDocListAndSet().docList = list;
    }

    public DocSet getDocSet() { return docListAndSet.docSet; }

    public void setDocSet(DocSet set) {
        ensureDocListAndSet().docSet = set;
    }

    /** @return true if the search stopped early (e.g. the time budget expired). */
    public boolean isPartialResults() { return partialResults; }
    public void setPartialResults(boolean partialResults) { this.partialResults = partialResults; }

    public void setDocListAndSet(DocListAndSet listSet) { docListAndSet = listSet; }
    public DocListAndSet getDocListAndSet() { return docListAndSet; }
}
/**
 * Installs cache regenerators on the caches configured in {@code solrConfig}.
 * NOTE(review): every branch here only fires when getRegenerator() == null and
 * then calls setRegenerator(null) — a literal no-op. Presumably the real
 * regenerator construction was stripped from this fork; confirm against the
 * upstream SolrIndexSearcher.initRegenerators implementation before relying
 * on cache auto-warming.
 */
public static void initRegenerators(SolrConfig solrConfig) {
    if (solrConfig.fieldValueCacheConfig != null && solrConfig.fieldValueCacheConfig.getRegenerator() == null) {
        solrConfig.fieldValueCacheConfig.setRegenerator(null);
    }
    if (solrConfig.filterCacheConfig != null && solrConfig.filterCacheConfig.getRegenerator() == null) {
        solrConfig.filterCacheConfig.setRegenerator(null);
    }
    if (solrConfig.userCacheConfigs != null) {
        for (CacheConfig conf : solrConfig.userCacheConfigs) {
            if (conf != null && conf.getRegenerator() == null) {
                conf.setRegenerator(null);
            }
        }
    }
    if (solrConfig.queryResultCacheConfig != null && solrConfig.queryResultCacheConfig.getRegenerator() == null) {
        solrConfig.queryResultCacheConfig.setRegenerator(null);
    }
}
// public void cacheDocSet(Query query, DocSet optionalAnswer, boolean mustCache) throws IOException {
// // Even if the cache is null, still compute the DocSet as it may serve to warm the Lucene
// // or OS disk cache.
// if (optionalAnswer != null) {
// this.filterCachePut(query, optionalAnswer);
// return;
// }
//
// // Throw away the result, relying on the fact that getDocSet
// // will currently always cache what it found. If getDocSet() starts
// // using heuristics about what to cache, and mustCache==true, (or if we
// // want this method to start using heuristics too) then
// // this needs to change.
// getDocSet(query);
// }
/** @return the names of all fields known to this searcher. */
public Collection<String> getFieldNames() {
    return fieldNames;
}
/**
 * Returns the names of all stored fields of a text or string type — i.e. the
 * fields that can be highlighted. The list is computed lazily from
 * {@code fieldNames} on first call and cached.
 * NOTE(review): the lazy initialization is not synchronized; concurrent first
 * calls may each build the list. Harmless if the result is identical, but
 * confirm the caller threading model.
 */
public Collection<String> getStoredHighlightFieldNames() {
    if (storedHighlightFieldNames == null) {
        storedHighlightFieldNames = new LinkedList<String>();
        for (String fieldName : fieldNames) {
            try {
                SchemaField field = schema.getField(fieldName);
                if (field.stored() &&
                    ((field.getType() instanceof org.apache.solr.schema.TextField) ||
                     (field.getType() instanceof org.apache.solr.schema.StrField))) {
                    storedHighlightFieldNames.add(fieldName);
                }
            } catch (RuntimeException e) { // getField() throws a SolrException, but it arrives as a RuntimeException
                // Parameterized logging: no string concatenation when WARN is disabled.
                log.warn("Field \"{}\" found in index, but not defined in schema.", fieldName);
            }
        }
    }
    return storedHighlightFieldNames;
}
}
/**
 * A Lucene Filter that intersects an optional pre-computed DocSet with the
 * scorers of any number of Weights (used to apply post-filter queries).
 */
class FilterImpl extends Filter {
  final DocSet filter;
  final Filter topFilter;
  final List<Weight> weights;

  public FilterImpl(DocSet filter, List<Weight> weights) {
    this.filter = filter;
    this.weights = weights;
    this.topFilter = filter == null ? null : filter.getTopFilter();
  }

  @Override
  public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
    DocIdSet sub = topFilter == null ? null : topFilter.getDocIdSet(reader);
    // With no extra weights, the DocSet filter alone decides membership.
    if (weights.size() == 0) return sub;
    return new FilterSet(sub, reader);
  }

  /** Lazily intersects the base DocIdSet with the scorers of all weights. */
  private class FilterSet extends DocIdSet {
    private final DocIdSet docIdSet;
    private final IndexReader reader;

    public FilterSet(DocIdSet docIdSet, IndexReader reader) {
      this.docIdSet = docIdSet;
      this.reader = reader;
    }

    @Override
    public DocIdSetIterator iterator() throws IOException {
      List<DocIdSetIterator> iterators = new ArrayList<DocIdSetIterator>(weights.size() + 1);
      if (docIdSet != null) {
        DocIdSetIterator iter = docIdSet.iterator();
        // A null iterator means the base set is empty, so the intersection is empty.
        if (iter == null) return null;
        iterators.add(iter);
      }
      for (Weight w : weights) {
        Scorer scorer = w.scorer(reader, true, false);
        // A null scorer matches nothing, so the intersection is empty.
        if (scorer == null) return null;
        iterators.add(scorer);
      }
      if (iterators.size() == 0) return null;
      if (iterators.size() == 1) return iterators.get(0);
      if (iterators.size() == 2) return new DualFilterIterator(iterators.get(0), iterators.get(1));
      return new FilterIterator(iterators.toArray(new DocIdSetIterator[iterators.size()]));
    }
  }

  /** Leapfrog intersection of three or more DocIdSetIterators. */
  private static class FilterIterator extends DocIdSetIterator {
    final DocIdSetIterator[] iterators;
    final DocIdSetIterator first;

    public FilterIterator(DocIdSetIterator[] iterators) {
      this.iterators = iterators;
      this.first = iterators[0];
    }

    @Override
    public int docID() {
      return first.docID();
    }

    /**
     * Advances all iterators to agree on {@code doc}. Whenever one iterator
     * overshoots, it becomes the new leader and the scan restarts from the
     * first iterator at the new candidate doc.
     */
    private int doNext(int doc) throws IOException {
      int which = 0; // index of the iterator currently on the highest (candidate) id
      int i = 1;
      outer: for (;;) {
        for (; i < iterators.length; i++) {
          if (i == which) continue; // leader is already positioned on doc
          DocIdSetIterator iter = iterators[i];
          int next = iter.advance(doc);
          if (next != doc) {
            // Overshoot: restart the round with this iterator as the leader.
            doc = next;
            which = i;
            i = 0;
            continue outer;
          }
        }
        return doc;
      }
    }

    @Override
    public int nextDoc() throws IOException {
      return doNext(first.nextDoc());
    }

    @Override
    public int advance(int target) throws IOException {
      return doNext(first.advance(target));
    }
  }

  /** Specialized two-way leapfrog intersection. */
  private static class DualFilterIterator extends DocIdSetIterator {
    final DocIdSetIterator a;
    final DocIdSetIterator b;

    public DualFilterIterator(DocIdSetIterator a, DocIdSetIterator b) {
      this.a = a;
      this.b = b;
    }

    @Override
    public int docID() {
      return a.docID();
    }

    /**
     * Leapfrogs both iterators from {@code doc} (a's current position) until
     * they agree on a doc id. Extracted to remove the duplicated loop that
     * previously appeared in both nextDoc() and advance().
     */
    private int doNext(int doc) throws IOException {
      for (;;) {
        int other = b.advance(doc);
        if (other == doc) return doc;
        doc = a.advance(other);
        if (other == doc) return doc;
      }
    }

    @Override
    public int nextDoc() throws IOException {
      return doNext(a.nextDoc());
    }

    @Override
    public int advance(int target) throws IOException {
      return doNext(a.advance(target));
    }
  }
}
|
package edu.jhu.thrax.util.exceptions;
/**
 * Thrown when an input line or record cannot be parsed.
 */
public class MalformedInputException extends Exception
{
    private static final long serialVersionUID = 5544L;

    /** Creates an exception with no detail message. */
    public MalformedInputException()
    {
        super();
    }

    /**
     * Creates an exception describing the offending input.
     *
     * @param input description (or the text) of the malformed input
     */
    public MalformedInputException(String input)
    {
        super(input);
    }

    /**
     * Creates an exception with a message and the underlying cause,
     * preserving the original stack trace for diagnosis.
     *
     * @param input description of the malformed input
     * @param cause the exception that triggered this one
     */
    public MalformedInputException(String input, Throwable cause)
    {
        super(input, cause);
    }

    /**
     * Creates an exception wrapping an underlying cause.
     *
     * @param cause the exception that triggered this one
     */
    public MalformedInputException(Throwable cause)
    {
        super(cause);
    }
}
|
/**
* THE INFORMATION AND SPECIFICATIONS IN THIS DEVELOPER KIT ARE SUBJECT TO CHANGE WITHOUT NOTICE.
* ALL INFORMATION AND SPECIFICATIONS IN THIS DEVELOPER KIT ARE PRESENTED WITHOUT WARRANTY OF ANY
* KIND, EXPRESS OR IMPLIED. YOU TAKE FULL RESPONSIBILITY FOR YOUR USE OF THE DEVELOPER KIT.
* THE DEVELOPER KIT IS LICENSED TO YOU UNDER THE THEN-CURRENT LICENSE TERMS FOR THE DEVELOPER
* KIT IN EFFECT AT THE TIME THE DEVELOPER KIT IS PROVIDED TO YOU BY EXTREME NETWORKS.
* PLEASE CONTACT EXTREME NETWORKS IF YOU DO NOT HAVE A COPY OF THE LICENSE TERMS. USE OF THE
* DEVELOPER KIT CONSTITUTES YOUR ACCEPTANCE OF THE DEVELOPER KIT LICENSE TERMS.
*
* Copyright (c) Extreme Networks Inc. 2007,2008
*/
package com.extremenetworks.exos.api.examples;
import java.util.Map;
import com.extremenetworks.www.XMLSchema.xos._switch.SwitchPortType;
import com.extremenetworks.www.XMLSchema.xos.common.Session;
import com.extremenetworks.www.XMLSchema.xos.vlan.VlanConfig;
/**
* This is an example of a request to create a new VLAN.
* The example is based on the operations defined in switch.wsdl
*
* A VLAN with name "myVlan1" is created on VR "VR-Default".
*
* Please refer to the EXOS Reference Guide and EXOS Concepts Guide for a detailed description
* of the VLAN feature.
*
* If a VLAN with the name already exists on the switch the operation will fail with an appropriate
* error message.
*
* NOTE: This example code is for illustration purposes only.
* The choice of attributes, values and error handling is simplified to emphasis the API functionality.
*
*/
public class CreateExample {

    /**
     * Create new VLAN. This sends a create request to create a new VLAN on the switch.
     *
     * Since the create operation works on any object of type ExosBase, the use of
     * VlanConfig can be replaced with the object that needs to be created. For example,
     * to create a new user account use UserAccount instead of VlanConfig.
     *
     * @param stub handle to the webservices on the switch
     * @param session webservices session on the switch
     * @param vlanName name of new VLAN
     * @param vrName name of VR for the VLAN
     */
    public void createVLAN(SwitchPortType stub, Session session, String vlanName, String vrName) {
        Utilities.log("createVLAN: VLAN Name= "+vlanName+", VR Name= "+vrName);
        try {
            // Object of type ExosBase: describes the VLAN to be created.
            VlanConfig filter = new VlanConfig();
            filter.setName(vlanName);
            filter.setVrName(vrName);
            Utilities.sendCreateRequest(stub, session, filter);
        } catch (Exception ex) {
            // Example code: log and continue rather than propagate.
            Utilities.log("ERROR : "+ex.getMessage());
        }
        Utilities.log("createVLAN: DONE");
    }

    /**
     * Main execution method.
     * Usage: CreateExample switch=&lt;switch&gt; username=&lt;username&gt; password=&lt;password&gt;
     *
     * @param args command line arguments
     */
    public static void main(String[] args) {
        Map arguments = Utilities.parseCommandLineArgs(args);
        String device = (String) arguments.get("switch");
        String username = (String) arguments.get("username");
        String password = (String) arguments.get("password");
        // Bug fix: password was never validated even though the usage line
        // requires it; a missing password previously failed later in openSession.
        if (device == null || username == null || password == null) {
            System.out.println("Usage: CreateExample switch=<switch> username=<username> password=<password>");
            System.exit(1);
        }
        try {
            // Get handle to switch web service
            SwitchPortType stub = Utilities.getSwitchPort(device);
            // Open a new session
            Session session = Utilities.openSession(stub, username, password);
            // Execute operations
            CreateExample createExample = new CreateExample();
            createExample.createVLAN(stub, session, "myVlan1", "VR-Default");
            // Close session
            Utilities.closeSession(stub, session);
        } catch (Exception ex) {
            Utilities.log("ERROR : "+ex.getMessage());
        }
    }
}
|
/*
* Copyright 2017-2020 original authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.micronaut.starter.feature.multitenancy;
import io.micronaut.core.annotation.NonNull;
import io.micronaut.starter.application.ApplicationType;
import io.micronaut.starter.application.generator.GeneratorContext;
import io.micronaut.starter.build.dependencies.Dependency;
import io.micronaut.starter.feature.Category;
import io.micronaut.starter.feature.Feature;
import io.micronaut.starter.feature.server.MicronautServerDependent;
import javax.inject.Singleton;
@Singleton
public class Multitenancy implements Feature, MicronautServerDependent {

    private static final String NAME = "multi-tenancy";
    private static final String TITLE = "Multi-tenancy";
    private static final String DESCRIPTION =
            "Adds multi-tenancy capabilities to your app. Tenant resolution, tenant propagation";

    /** Feature identifier used on the CLI / API. */
    @NonNull
    @Override
    public String getName() {
        return NAME;
    }

    /** Human-readable feature title. */
    @Override
    public String getTitle() {
        return TITLE;
    }

    /** Short description shown in the feature list. */
    @NonNull
    @Override
    public String getDescription() {
        return DESCRIPTION;
    }

    /** This feature applies to every application type. */
    @Override
    public boolean supports(ApplicationType applicationType) {
        return true;
    }

    @Override
    public String getCategory() {
        return Category.DATABASE;
    }

    @Override
    public String getMicronautDocumentation() {
        return "https://docs.micronaut.io/latest/guide/index.html#multitenancy";
    }

    /** Adds the runtime library that provides tenant resolution and propagation. */
    @Override
    public void apply(GeneratorContext generatorContext) {
        generatorContext.addDependency(Dependency.builder()
                .groupId("io.micronaut")
                .artifactId("micronaut-multitenancy")
                .compile());
    }
}
|
package com.ruoyi.framework.security.handle;
import com.alibaba.fastjson.JSON;
import com.ruoyi.common.constant.Constants;
import com.ruoyi.common.constant.HttpStatus;
import com.ruoyi.common.utils.ServletUtils;
import com.ruoyi.common.utils.StringUtils;
import com.ruoyi.framework.manager.AsyncManager;
import com.ruoyi.framework.manager.factory.AsyncFactory;
import com.ruoyi.framework.security.LoginUser;
import com.ruoyi.framework.security.service.TokenService;
import com.ruoyi.framework.web.domain.AjaxResult;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.core.Authentication;
import org.springframework.security.web.authentication.logout.LogoutSuccessHandler;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* 自定义退出处理类 返回成功
*
* @author ruoyi
*/
@Configuration
public class LogoutSuccessHandlerImpl implements LogoutSuccessHandler {
@Autowired
private TokenService tokenService;
/**
* 退出处理
*/
@Override
public void onLogoutSuccess(HttpServletRequest request, HttpServletResponse response, Authentication authentication) {
// 获得当前 LoginUser
LoginUser loginUser = tokenService.getLoginUser(request);
// 如果有登陆的情况下
if (StringUtils.isNotNull(loginUser)) {
String userName = loginUser.getUsername();
// 删除用户缓存记录
tokenService.delLoginUser(loginUser.getToken());
// 记录用户退出日志
AsyncManager.me().execute(AsyncFactory.recordLogininfor(userName, Constants.LOGOUT, "退出成功"));
}
// 响应退出成功
ServletUtils.renderString(response, JSON.toJSONString(AjaxResult.error(HttpStatus.SUCCESS, "退出成功")));
}
}
|
/*
* Copyright 2000-2022 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.flow.internal;
import java.io.Serializable;
import java.lang.annotation.Annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for ReflectTools.
 * Review fixes: the no-shared-ancestor class-loader test was missing its
 * {@code @Test} annotation (it never ran); {@code TestPackageProtectedClass}
 * was declared {@code protected} contradicting its name; a few typo'd test
 * method names were corrected.
 */
public class ReflectToolsTest {
    @Rule
    public ExpectedException thrown = ExpectedException.none();

    public class NonStaticInnerClass {
        public NonStaticInnerClass() {
        }
    }

    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.TYPE)
    public @interface TestAnnotation {
        String value();
    }

    @TestAnnotation("foo")
    public static class ClassWithAnnotation {
    }

    public static class ClassWithoutAnnotation {
    }

    private class PrivateInnerClass {
    }

    private static class PrivateStaticInnerClassPublicConstructor {
        @SuppressWarnings("unused")
        public PrivateStaticInnerClassPublicConstructor() {
        }
    }

    public static class StaticInnerPrivateConstructorClass {
        private StaticInnerPrivateConstructorClass() {
        }
    }

    public static class StaticInnerConstructorNeedsParamsClass {
        public StaticInnerConstructorNeedsParamsClass(String foo) {
        }
    }

    public static class ConstructorThrowsExceptionClass {
        public ConstructorThrowsExceptionClass() {
            throw new NullPointerException();
        }
    }

    public static class OkToCreate {
    }

    public static class VarArgsCtor {
        public VarArgsCtor(String... args) {
        }
    }

    public interface Entity<ID> {
        ID getId();

        void setId(ID id);
    }

    public static class CustomClassLoader extends ClassLoader {
        protected CustomClassLoader(ClassLoader parent) {
            super(parent);
        }

        protected CustomClassLoader() {
            super();
        }
    }

    public class Category implements Serializable, Entity<Long> {
        @Override
        public Long getId() {
            return null;
        }

        @Override
        public void setId(Long id) {
        }
    }

    @Test
    public void testCreateInstance() {
        OkToCreate instance = ReflectTools.createInstance(OkToCreate.class);
        Assert.assertNotNull(instance);
        Assert.assertSame("Created instance should be of the requested type",
                OkToCreate.class, instance.getClass());
    }

    @Test
    public void testCreateInstance_varArgsCtor() {
        VarArgsCtor instance = ReflectTools.createInstance(VarArgsCtor.class);
        Assert.assertNotNull(instance);
        Assert.assertSame("Created instance should be of the requested type",
                VarArgsCtor.class, instance.getClass());
    }

    @Test
    public void createNonStaticInnerClass() {
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_FOR_NON_STATIC_MEMBER_CLASS,
                NonStaticInnerClass.class);
    }

    @Test
    public void createPrivateInnerClass() {
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_FOR_NON_STATIC_MEMBER_CLASS,
                PrivateInnerClass.class);
    }

    @Test
    public void createStaticInnerPrivateConstructorClass() {
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_NO_PUBLIC_NOARG_CONSTRUCTOR,
                StaticInnerPrivateConstructorClass.class);
    }

    @Test
    public void createStaticInnerConstructorNeedsParamsClass() {
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_NO_PUBLIC_NOARG_CONSTRUCTOR,
                StaticInnerConstructorNeedsParamsClass.class);
    }

    @Test
    public void createConstructorThrowsExceptionClass() {
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_CONSTRUCTOR_THREW_EXCEPTION,
                ConstructorThrowsExceptionClass.class);
    }

    @Test
    public void localClass() {
        class LocalClass {
        }
        assertError(ReflectTools.CREATE_INSTANCE_FAILED_LOCAL_CLASS,
                LocalClass.class);
    }

    @Test
    public void createProxyForNonStaticInnerClass() {
        Class<NonStaticInnerClass> originalClass = NonStaticInnerClass.class;
        Class<?> proxyClass = createProxyClass(originalClass);
        // Even though proxyClass was created on top of NonStaticInnerClass, the
        // exception message does not show it.
        // It's sort of a feature, because proxy class is created as a top-level
        // class.
        assertError(
                ReflectTools.CREATE_INSTANCE_FAILED_NO_PUBLIC_NOARG_CONSTRUCTOR,
                proxyClass);
        // This is how you get correct exception message.
        try {
            ReflectTools.createProxyInstance(proxyClass, originalClass);
            Assert.fail("Creation should cause an exception");
        } catch (IllegalArgumentException re) {
            Assert.assertEquals(String.format(
                    ReflectTools.CREATE_INSTANCE_FAILED_FOR_NON_STATIC_MEMBER_CLASS,
                    originalClass.getName()), re.getMessage());
        }
    }

    public interface TestInterface<T> {
    }

    public static class HasInterface implements TestInterface<String> {
    }

    public static class ParentInterface implements TestInterface<Boolean> {
    }

    public static class ChildInterface extends ParentInterface {
    }

    public interface TestInterfaceMulti<T, R, S> {
    }

    public static class HasInterfaceMulti
            implements TestInterfaceMulti<String, Integer, Double> {
    }

    public static class ParentInterfacePartial<Z>
            implements TestInterfaceMulti<Boolean, Z, Long> {
    }

    public static class ParentInterfaceMulti
            implements TestInterfaceMulti<Boolean, Float, Long> {
    }

    public static class ChildInterfaceMulti extends ParentInterfaceMulti {
    }

    public static class ChildInterfacePartial
            extends ParentInterfacePartial<Short> {
    }

    public static abstract class TestAbstractClass {
    }

    protected static class TestProtectedClass {
    }

    // Fixed: was declared "protected", contradicting the class name; either way
    // the class is non-public, so the instantiability assertions are unchanged.
    static class TestPackageProtectedClass {
    }

    private static class TestPrivateClass {
    }

    public static class NormalService {
    }

    public static class TestNoNonArgConstructorClass {
        public TestNoNonArgConstructorClass(String foo) {
        }
    }

    @Test
    public void getGenericInterfaceClass() {
        Class<?> genericInterfaceType = ReflectTools.getGenericInterfaceType(
                HasInterface.class, TestInterface.class);
        Assert.assertEquals(String.class, genericInterfaceType);
        genericInterfaceType = ReflectTools.getGenericInterfaceType(
                ChildInterface.class, TestInterface.class);
        Assert.assertEquals(Boolean.class, genericInterfaceType);
    }

    @Test
    public void getGenericInterfaceClasses() {
        List<Class<?>> genericInterfaceTypes = ReflectTools
                .getGenericInterfaceTypes(HasInterface.class,
                        TestInterface.class);
        Assert.assertArrayEquals(new Class<?>[] { String.class },
                genericInterfaceTypes.toArray());
        genericInterfaceTypes = ReflectTools.getGenericInterfaceTypes(
                ChildInterface.class, TestInterface.class);
        Assert.assertArrayEquals(new Class<?>[] { Boolean.class },
                genericInterfaceTypes.toArray());
        genericInterfaceTypes = ReflectTools.getGenericInterfaceTypes(
                HasInterfaceMulti.class, TestInterfaceMulti.class);
        Assert.assertArrayEquals(
                new Class<?>[] { String.class, Integer.class, Double.class },
                genericInterfaceTypes.toArray());
        genericInterfaceTypes = ReflectTools.getGenericInterfaceTypes(
                ChildInterfaceMulti.class, TestInterfaceMulti.class);
        Assert.assertArrayEquals(
                new Class<?>[] { Boolean.class, Float.class, Long.class },
                genericInterfaceTypes.toArray());
        genericInterfaceTypes = ReflectTools.getGenericInterfaceTypes(
                ChildInterfacePartial.class, TestInterfaceMulti.class);
        Assert.assertArrayEquals(
                new Class<?>[] { Boolean.class, Short.class, Long.class },
                genericInterfaceTypes.toArray());
    }

    @Test
    public void findCommonBaseType_sameType() {
        assertSame(Number.class,
                ReflectTools.findCommonBaseType(Number.class, Number.class));
    }

    @Test
    public void findCommonBaseType_aExtendsB() {
        assertSame(Number.class,
                ReflectTools.findCommonBaseType(Integer.class, Number.class));
    }

    @Test
    public void findCommonBaseType_bExtendsA() {
        assertSame(Number.class,
                ReflectTools.findCommonBaseType(Number.class, Integer.class));
    }

    @Test
    public void findCommonBaseType_commonBase() {
        assertSame(Number.class,
                ReflectTools.findCommonBaseType(Double.class, Integer.class));
    }

    @Test
    public void findCommonBaseType_noCommonBase() {
        assertSame(Object.class,
                ReflectTools.findCommonBaseType(String.class, Number.class));
    }

    @Test
    public void findCommonBaseType_interfaceNotSupported() {
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage("a cannot be an interface");
        ReflectTools.findCommonBaseType(Comparable.class, Object.class);
    }

    @Test
    public void findCommonBaseType_primitiveNotSupported() {
        thrown.expect(IllegalArgumentException.class);
        thrown.expectMessage("a cannot be a primitive type");
        ReflectTools.findCommonBaseType(int.class, Object.class);
    }

    @Test
    public void getSetters_classIsGeneric_syntheticMethodsAreFilteredOut() {
        List<Method> setters = ReflectTools.getSetterMethods(Category.class)
                .collect(Collectors.toList());
        Assert.assertEquals(1, setters.size());
        Method setter = setters.get(0);
        Assert.assertEquals("setId", setter.getName());
        Assert.assertEquals(Long.class, setter.getParameterTypes()[0]);
    }

    @Test
    public void findClosestCommonClassLoaderAncestor_findAncestor_whenBothArgumentsAreTheSame() {
        CustomClassLoader loader = new CustomClassLoader();
        ClassLoader ret = ReflectTools
                .findClosestCommonClassLoaderAncestor(loader, loader).get();
        Assert.assertEquals(loader, ret);
    }

    // Bug fix: @Test was missing here, so this case silently never ran.
    @Test
    public void findClosestCommonClassLoaderAncestor_null_whenNoSharedAncestor() {
        CustomClassLoader loader1 = new CustomClassLoader();
        CustomClassLoader loader2 = new CustomClassLoader();
        Optional<ClassLoader> ret = ReflectTools
                .findClosestCommonClassLoaderAncestor(loader1, loader2);
        Assert.assertFalse(ret.isPresent());
    }

    @Test
    public void findClosestCommonClassLoaderAncestor_findsAncestor_whenOneIsParentOfTheOther() {
        CustomClassLoader parent = new CustomClassLoader();
        CustomClassLoader child = new CustomClassLoader(parent);
        ClassLoader ret = ReflectTools
                .findClosestCommonClassLoaderAncestor(parent, child).get();
        Assert.assertEquals(parent, ret);
    }

    @Test
    public void findClosestCommonClassLoaderAncestor_findsAncestor_whenLoadersShareParent() {
        CustomClassLoader parent = new CustomClassLoader();
        CustomClassLoader childA = new CustomClassLoader(parent);
        CustomClassLoader childB = new CustomClassLoader(parent);
        ClassLoader ret = ReflectTools
                .findClosestCommonClassLoaderAncestor(childA, childB).get();
        Assert.assertEquals(parent, ret);
    }

    @Test
    public void findClosestCommonClassLoaderAncestor_findsAncestor_whenAncestorsAreOnDifferentLevels() {
        CustomClassLoader grandParent = new CustomClassLoader();
        CustomClassLoader parent = new CustomClassLoader(grandParent);
        CustomClassLoader childA = new CustomClassLoader(parent);
        CustomClassLoader childB = new CustomClassLoader(grandParent);
        ClassLoader ret = ReflectTools
                .findClosestCommonClassLoaderAncestor(childA, childB).get();
        Assert.assertEquals(grandParent, ret);
    }

    @Test
    public void findClosestCommonClassLoaderAncestor_empty_whenEitherOrBothNull() {
        CustomClassLoader loader = new CustomClassLoader();
        Optional<ClassLoader> ret;
        ret = ReflectTools.findClosestCommonClassLoaderAncestor(loader, null);
        Assert.assertFalse(ret.isPresent());
        ret = ReflectTools.findClosestCommonClassLoaderAncestor(null, loader);
        Assert.assertFalse(ret.isPresent());
        ret = ReflectTools.findClosestCommonClassLoaderAncestor(null, null);
        Assert.assertFalse(ret.isPresent());
    }

    @Test
    public void hasAnnotation_annotationPresents_returnsTrue() {
        Assert.assertTrue(ReflectTools.hasAnnotation(ClassWithAnnotation.class,
                TestAnnotation.class.getName()));
    }

    @Test
    public void hasAnnotation_annotationIsAbsent_returnsFalse() {
        Assert.assertFalse(ReflectTools.hasAnnotation(
                ClassWithoutAnnotation.class, TestAnnotation.class.getName()));
    }

    @Test
    public void hasAnnotationWithSimpleName_annotationPresents_returnsTrue() {
        Assert.assertTrue(ReflectTools.hasAnnotationWithSimpleName(
                ClassWithAnnotation.class,
                TestAnnotation.class.getSimpleName()));
    }

    @Test
    public void hasAnnotationWithSimpleName_annotationIsAbsent_returnsFalse() {
        Assert.assertFalse(ReflectTools.hasAnnotationWithSimpleName(
                ClassWithoutAnnotation.class,
                TestAnnotation.class.getSimpleName()));
    }

    @Test
    public void getAnnotationMethodValue_annotationHasMethod_theValueIsReturned() {
        Assert.assertEquals("foo", ReflectTools.getAnnotationMethodValue(
                ClassWithAnnotation.class.getAnnotation(TestAnnotation.class),
                "value"));
    }

    @Test(expected = IllegalArgumentException.class)
    public void getAnnotationMethodValue_annotationHasNoMethod_throws() {
        ReflectTools.getAnnotationMethodValue(
                ClassWithAnnotation.class.getAnnotation(TestAnnotation.class),
                "foo");
    }

    @Test
    public void getAnnotation_annotationPresents_returnsAnnotation() {
        Optional<Annotation> annotation = ReflectTools.getAnnotation(
                ClassWithAnnotation.class, TestAnnotation.class.getName());
        Assert.assertTrue(annotation.isPresent());
        Assert.assertEquals(
                ClassWithAnnotation.class.getAnnotation(TestAnnotation.class),
                annotation.get());
    }

    @Test
    public void getAnnotation_annotationIsAbsent_returnsEmpty() {
        Optional<Annotation> annotation = ReflectTools.getAnnotation(
                ClassWithoutAnnotation.class, TestAnnotation.class.getName());
        Assert.assertFalse(annotation.isPresent());
    }

    @Test
    public void interfaceShouldNotBeInstantiableService() {
        assertFalse(ReflectTools.isInstantiableService(TestInterface.class));
    }

    @Test
    public void abstractClassShouldNotBeInstantiableService() {
        assertFalse(
                ReflectTools.isInstantiableService(TestAbstractClass.class));
    }

    @Test
    public void nonPublicClassShouldNotBeInstantiableService() {
        assertFalse(
                ReflectTools.isInstantiableService(TestProtectedClass.class));
        assertFalse(ReflectTools
                .isInstantiableService(TestPackageProtectedClass.class));
        assertFalse(ReflectTools.isInstantiableService(TestPrivateClass.class));
    }

    @Test
    public void classWithoutNonArgConstructorShouldNotBeInstantiableService() {
        assertFalse(ReflectTools
                .isInstantiableService(TestNoNonArgConstructorClass.class));
    }

    @Test
    public void nonStaticInnerClassShouldNotBeInstantiableService() {
        assertFalse(
                ReflectTools.isInstantiableService(NonStaticInnerClass.class));
    }

    @Test
    public void privateInnerClassShouldNotBeInstantiableService() {
        assertFalse(
                ReflectTools.isInstantiableService(PrivateInnerClass.class));
    }

    @Test
    public void normalServiceShouldBeInstantiableService() {
        assertTrue(ReflectTools.isInstantiableService(NormalService.class));
    }

    /** Builds a top-level proxy subclass of the given class via ByteBuddy. */
    private Class<?> createProxyClass(Class<?> originalClass) {
        return new ByteBuddy().subclass(originalClass).make()
                .load(originalClass.getClassLoader(),
                        ClassLoadingStrategy.Default.WRAPPER)
                .getLoaded();
    }

    /** Asserts that createInstance(cls) fails with the expected formatted message. */
    private void assertError(String expectedError, Class<?> cls) {
        try {
            ReflectTools.createInstance(cls);
            Assert.fail("Creation should cause an exception");
        } catch (IllegalArgumentException re) {
            Assert.assertEquals(String.format(expectedError, cls.getName()),
                    re.getMessage());
        }
    }
}
|
package ncu.cc.digger.entities;
import javax.persistence.*;
import java.sql.Timestamp;
import java.util.Objects;
@Entity
@Table(name = "report_histories", schema = "digger_db", catalog = "")
public class ReportHistoryEntity {
// Field semantics are inferred from column names and JPA annotations below;
// verify against the digger_db.report_histories schema.
private Integer id;              // surrogate primary key (auto-increment)
private String zoneId;           // DNS zone this report describes
private String parentZone;       // parent zone name, if any (nullable)
private Integer score;           // overall score of the zone (nullable)
private String soaEmail;         // email field from the zone's SOA record
private Long soaSerialno;        // serial number from the zone's SOA record
private Byte severityLevel;      // overall severity level of the report
private Integer severityUrgent;  // count of urgent-severity findings
private Integer severityHigh;    // count of high-severity findings
private Integer severityMedium;  // count of medium-severity findings
private Integer severityLow;     // count of low-severity findings
private Integer severityInfo;    // count of informational findings
// NOTE(review): the Byte fields below appear to be 0/1 flags mapped from
// TINYINT columns — confirm before treating them as booleans.
private Byte dnssecEnabled;      // DNSSEC enabled on the zone?
private Byte ipv6Available;      // zone reachable over IPv6? (nullable)
private Byte soaInconsistency;   // SOA records inconsistent across servers?
private Byte openRecursive;      // open recursion detected?
private Byte openAxfr;           // open zone transfer (AXFR) detected?
private Byte nonCompliantEdns;   // EDNS compliance failure detected?
private Byte serverNotWorking;   // unresponsive server detected?
private Byte rrsetInconsistency; // RRset inconsistency detected?
private Integer numberOfServers;  // number of servers checked
private Integer numberOfProblems; // total number of problems found
private String jsonReport;       // full report payload as JSON text
private String remoteAddress;    // address associated with the report request
private Timestamp updatedAt;     // last update time of this row
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name = "id", nullable = false)
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
@Basic
@Column(name = "zone_id", nullable = false, length = 255)
public String getZoneId() {
return zoneId;
}
public void setZoneId(String zoneId) {
this.zoneId = zoneId;
}
// ---------------------------------------------------------------------------
// JPA property accessors. Annotations sit on the getters, so the persistence
// provider uses property (getter/setter) access for this entity. Comments
// below restate what the mappings declare; domain meanings inferred only from
// column names are marked as assumptions to confirm.
// ---------------------------------------------------------------------------

@Basic
@Column(name = "parent_zone", nullable = true, length = 255)
// Nullable parent-zone column. NOTE(review): unlike the other mapped
// properties, parentZone is not part of equals()/hashCode() — confirm that
// omission is intentional.
public String getParentZone() {
    return parentZone;
}

public void setParentZone(String parentZone) {
    this.parentZone = parentZone;
}

@Basic
@Column(name = "score", nullable = true)
// Nullable integer score for the report (presumably an overall zone-health
// score — confirm with the report generator).
public Integer getScore() {
    return score;
}

public void setScore(Integer score) {
    this.score = score;
}

@Basic
@Column(name = "soa_email", nullable = false, length = 72)
// Required, max 72 chars; presumably the RNAME/contact e-mail taken from the
// zone's SOA record — confirm.
public String getSoaEmail() {
    return soaEmail;
}

public void setSoaEmail(String soaEmail) {
    this.soaEmail = soaEmail;
}

@Basic
@Column(name = "soa_serialno", nullable = false)
// Required; presumably the SOA serial number observed for the zone.
public Long getSoaSerialno() {
    return soaSerialno;
}

public void setSoaSerialno(Long soaSerialno) {
    this.soaSerialno = soaSerialno;
}

@Basic
@Column(name = "severity_level", nullable = false)
// Required single-byte severity classification for the whole report.
public Byte getSeverityLevel() {
    return severityLevel;
}

public void setSeverityLevel(Byte severityLevel) {
    this.severityLevel = severityLevel;
}

// --- Per-severity counters (all required). Presumably counts of findings at
// --- each severity bucket — confirm against the report generator.

@Basic
@Column(name = "severity_urgent", nullable = false)
public Integer getSeverityUrgent() {
    return severityUrgent;
}

public void setSeverityUrgent(Integer severityUrgent) {
    this.severityUrgent = severityUrgent;
}

@Basic
@Column(name = "severity_high", nullable = false)
public Integer getSeverityHigh() {
    return severityHigh;
}

public void setSeverityHigh(Integer severityHigh) {
    this.severityHigh = severityHigh;
}

@Basic
@Column(name = "severity_medium", nullable = false)
public Integer getSeverityMedium() {
    return severityMedium;
}

public void setSeverityMedium(Integer severityMedium) {
    this.severityMedium = severityMedium;
}

@Basic
@Column(name = "severity_low", nullable = false)
public Integer getSeverityLow() {
    return severityLow;
}

public void setSeverityLow(Integer severityLow) {
    this.severityLow = severityLow;
}

@Basic
@Column(name = "severity_info", nullable = false)
public Integer getSeverityInfo() {
    return severityInfo;
}

public void setSeverityInfo(Integer severityInfo) {
    this.severityInfo = severityInfo;
}

// --- Boolean-like Byte flags describing detected zone properties/problems.
// --- All required except ipv6_available (nullable).

@Basic
@Column(name = "dnssec_enabled", nullable = false)
public Byte getDnssecEnabled() {
    return dnssecEnabled;
}

public void setDnssecEnabled(Byte dnssecEnabled) {
    this.dnssecEnabled = dnssecEnabled;
}

@Basic
@Column(name = "ipv6_available", nullable = true)
public Byte getIpv6Available() {
    return ipv6Available;
}

public void setIpv6Available(Byte ipv6Available) {
    this.ipv6Available = ipv6Available;
}

@Basic
@Column(name = "soa_inconsistency", nullable = false)
public Byte getSoaInconsistency() {
    return soaInconsistency;
}

public void setSoaInconsistency(Byte soaInconsistency) {
    this.soaInconsistency = soaInconsistency;
}

@Basic
@Column(name = "open_recursive", nullable = false)
public Byte getOpenRecursive() {
    return openRecursive;
}

public void setOpenRecursive(Byte openRecursive) {
    this.openRecursive = openRecursive;
}

@Basic
@Column(name = "open_axfr", nullable = false)
public Byte getOpenAxfr() {
    return openAxfr;
}

public void setOpenAxfr(Byte openAxfr) {
    this.openAxfr = openAxfr;
}

@Basic
@Column(name = "non_compliant_edns", nullable = false)
public Byte getNonCompliantEdns() {
    return nonCompliantEdns;
}

public void setNonCompliantEdns(Byte nonCompliantEdns) {
    this.nonCompliantEdns = nonCompliantEdns;
}

@Basic
@Column(name = "server_not_working", nullable = false)
public Byte getServerNotWorking() {
    return serverNotWorking;
}

public void setServerNotWorking(Byte serverNotWorking) {
    this.serverNotWorking = serverNotWorking;
}

@Basic
@Column(name = "rrset_inconsistency", nullable = false)
public Byte getRrsetInconsistency() {
    return rrsetInconsistency;
}

public void setRrsetInconsistency(Byte rrsetInconsistency) {
    this.rrsetInconsistency = rrsetInconsistency;
}

@Basic
@Column(name = "number_of_servers", nullable = false)
// Required count of name servers examined for the zone.
public Integer getNumberOfServers() {
    return numberOfServers;
}

public void setNumberOfServers(Integer numberOfServers) {
    this.numberOfServers = numberOfServers;
}

@Basic
@Column(name = "number_of_problems", nullable = false)
// Required total number of problems recorded in the report.
public Integer getNumberOfProblems() {
    return numberOfProblems;
}

public void setNumberOfProblems(Integer numberOfProblems) {
    this.numberOfProblems = numberOfProblems;
}

@Basic
@Column(name = "json_report", nullable = true, length = -1)
// Full report payload as a JSON string. length = -1 is the IDE generator's
// marker for an unbounded text column (e.g. TEXT/CLOB) — confirm the schema.
public String getJsonReport() {
    return jsonReport;
}

public void setJsonReport(String jsonReport) {
    this.jsonReport = jsonReport;
}

@Basic
@Column(name = "remote_address", nullable = false, length = 128)
// Required; length 128 accommodates IPv6 textual forms and then some —
// presumably the address of the client that requested the report.
public String getRemoteAddress() {
    return remoteAddress;
}

public void setRemoteAddress(String remoteAddress) {
    this.remoteAddress = remoteAddress;
}

@Basic
@Column(name = "updated_at", nullable = true)
// Nullable last-update timestamp (java.sql.Timestamp).
public Timestamp getUpdatedAt() {
    return updatedAt;
}

public void setUpdatedAt(Timestamp updatedAt) {
    this.updatedAt = updatedAt;
}
@Override
public boolean equals(Object o) {
    // IDE-generated value equality over the persisted fields.
    // NOTE(review): parentZone is a mapped column but is NOT compared here
    // (nor hashed below). equals/hashCode remain mutually consistent, so the
    // contract holds, but confirm the omission is intentional rather than a
    // generation-time oversight.
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    ReportHistoryEntity that = (ReportHistoryEntity) o;
    return Objects.equals(id, that.id) &&
    Objects.equals(zoneId, that.zoneId) &&
    Objects.equals(score, that.score) &&
    Objects.equals(soaEmail, that.soaEmail) &&
    Objects.equals(soaSerialno, that.soaSerialno) &&
    Objects.equals(severityLevel, that.severityLevel) &&
    Objects.equals(severityUrgent, that.severityUrgent) &&
    Objects.equals(severityHigh, that.severityHigh) &&
    Objects.equals(severityMedium, that.severityMedium) &&
    Objects.equals(severityLow, that.severityLow) &&
    Objects.equals(severityInfo, that.severityInfo) &&
    Objects.equals(dnssecEnabled, that.dnssecEnabled) &&
    Objects.equals(soaInconsistency, that.soaInconsistency) &&
    Objects.equals(openRecursive, that.openRecursive) &&
    Objects.equals(openAxfr, that.openAxfr) &&
    Objects.equals(nonCompliantEdns, that.nonCompliantEdns) &&
    Objects.equals(serverNotWorking, that.serverNotWorking) &&
    Objects.equals(rrsetInconsistency, that.rrsetInconsistency) &&
    Objects.equals(numberOfServers, that.numberOfServers) &&
    Objects.equals(numberOfProblems, that.numberOfProblems) &&
    Objects.equals(jsonReport, that.jsonReport) &&
    Objects.equals(remoteAddress, that.remoteAddress) &&
    Objects.equals(updatedAt, that.updatedAt);
}

@Override
public int hashCode() {
    // Hashes exactly the field set compared in equals() (parentZone omitted
    // here too, keeping the equals/hashCode contract intact).
    return Objects.hash(id, zoneId, score, soaEmail, soaSerialno, severityLevel, severityUrgent, severityHigh, severityMedium, severityLow, severityInfo, dnssecEnabled, soaInconsistency, openRecursive, openAxfr, nonCompliantEdns, serverNotWorking, rrsetInconsistency, numberOfServers, numberOfProblems, jsonReport, remoteAddress, updatedAt);
}
}
|
package com.amcharts.json;
import java.util.List;
import com.amcharts.api.IsCategoryAxis;
import com.amcharts.api.IsDateFormat;
import com.amcharts.api.IsFunction;
/**
 * JSON-mapped binding for the amCharts {@code CategoryAxis} configuration object.
 * Pure data holder: every property is stored verbatim and exposed through
 * conventional accessors; no behavior lives here. Javadoc summaries restate the
 * amCharts property documentation.
 */
public final class CategoryAxis extends AxisBase implements IsCategoryAxis {

    private double autoRotateAngle;
    private double autoRotateCount;
    private Boolean autoWrap;
    private Boolean boldPeriodBeginning;
    private IsFunction categoryFunction;
    private Boolean centerLabelOnFullPeriod;
    private List<? extends IsDateFormat> dateFormats;
    private Boolean equalSpacing;
    private double firstDayOfWeek;
    private String forceShowField;
    private String gridPosition;
    private String labelColorField;
    private IsFunction labelFunction;
    private Boolean markPeriodChange;
    private String minPeriod;
    private Boolean parseDates;
    private String position;
    private Boolean startOnAxis;
    private String tickPosition;
    private Boolean twoLineMode;

    /**
     * Angle by which labels are rotated once the series count exceeds
     * {@link #getAutoRotateCount()}. Only honoured when parseDates is false.
     */
    public double getAutoRotateAngle() {
        return autoRotateAngle;
    }

    /** @see #getAutoRotateAngle() */
    public void setAutoRotateAngle(double autoRotateAngle) {
        this.autoRotateAngle = autoRotateAngle;
    }

    /**
     * Item-count threshold above which labels are rotated by
     * {@link #getAutoRotateAngle()}. Only honoured when parseDates is false.
     */
    public double getAutoRotateCount() {
        return autoRotateCount;
    }

    /** @see #getAutoRotateCount() */
    public void setAutoRotateCount(double autoRotateCount) {
        this.autoRotateCount = autoRotateCount;
    }

    /**
     * Whether horizontal axis labels are wrapped when they do not fit their
     * allocated space. Wrapping does not apply to rotated labels.
     */
    public Boolean isAutoWrap() {
        return autoWrap;
    }

    /** @see #isAutoWrap() */
    public void setAutoWrap(Boolean autoWrap) {
        this.autoWrap = autoWrap;
    }

    /**
     * When date parsing is on, the chart highlights the beginning of periods
     * (e.g. months) in bold; set to false to disable that highlighting.
     */
    public Boolean isBoldPeriodBeginning() {
        return boldPeriodBeginning;
    }

    /** @see #isBoldPeriodBeginning() */
    public void setBoldPeriodBeginning(Boolean boldPeriodBeginning) {
        this.boldPeriodBeginning = boldPeriodBeginning;
    }

    /**
     * Custom callback returning the category value for each data item; its
     * return value takes precedence over categoryField. Invoked as
     * {@code categoryFunction(category, dataItem, categoryAxis)}. When the axis
     * parses dates the callback must return a Date, which makes it handy for
     * converting date strings in the data into Date objects.
     */
    public IsFunction getCategoryFunction() {
        return categoryFunction;
    }

    /** @see #getCategoryFunction() */
    public void setCategoryFunction(IsFunction categoryFunction) {
        this.categoryFunction = categoryFunction;
    }

    /**
     * Effective only when parseDates is true and equalSpacing is false; when
     * set to false, labels are never centered between grid lines.
     */
    public Boolean isCenterLabelOnFullPeriod() {
        return centerLabelOnFullPeriod;
    }

    /** @see #isCenterLabelOnFullPeriod() */
    public void setCenterLabelOnFullPeriod(Boolean centerLabelOnFullPeriod) {
        this.centerLabelOnFullPeriod = centerLabelOnFullPeriod;
    }

    /**
     * Date formats for the different periods. Period codes: fff milliseconds,
     * ss seconds, mm minutes, hh hours, DD days, MM months, WW weeks,
     * YYYY years.
     */
    public List<? extends IsDateFormat> getDateFormats() {
        return dateFormats;
    }

    /** @see #getDateFormats() */
    public void setDateFormats(List<? extends IsDateFormat> dateFormats) {
        this.dateFormats = dateFormats;
    }

    /**
     * With parseDates on, data points normally sit at irregular intervals
     * matching their dates. Set this to true to keep dates parsed (axis,
     * balloons, ...) while placing points at equal intervals, omitting dates
     * with no data.
     */
    public Boolean isEqualSpacing() {
        return equalSpacing;
    }

    /** @see #isEqualSpacing() */
    public void setEqualSpacing(Boolean equalSpacing) {
        this.equalSpacing = equalSpacing;
    }

    /** First day of the week: 0 is Sunday, 1 is Monday, and so on. */
    public double getFirstDayOfWeek() {
        return firstDayOfWeek;
    }

    /** @see #getFirstDayOfWeek() */
    public void setFirstDayOfWeek(double firstDayOfWeek) {
        this.firstDayOfWeek = firstDayOfWeek;
    }

    /**
     * Name of the data-provider field that forces a category value to always be
     * shown, e.g. {@code forceShowField = "forceShow"} with data
     * {@code {category:'one', forceShow:true, value:100}}. Works only when
     * parseDates is false.
     */
    public String getForceShowField() {
        return forceShowField;
    }

    /** @see #getForceShowField() */
    public void setForceShowField(String forceShowField) {
        this.forceShowField = forceShowField;
    }

    /**
     * Whether a grid line sits at the center or the beginning of a cell;
     * possible values are "start" and "middle". Ignored when parseDates is
     * true.
     */
    public String getGridPosition() {
        return gridPosition;
    }

    /** @see #getGridPosition() */
    public void setGridPosition(String gridPosition) {
        this.gridPosition = gridPosition;
    }

    /**
     * Data field used to color individual axis labels. Works only with
     * non-date-based data.
     */
    public String getLabelColorField() {
        return labelColorField;
    }

    /** @see #getLabelColorField() */
    public void setLabelColorField(String labelColorField) {
        this.labelColorField = labelColorField;
    }

    /**
     * Formatter callback for axis labels, invoked as
     * {@code labelFunction(valueText, serialDataItem, categoryAxis)} when dates
     * are not parsed, or {@code labelFunction(valueText, date, categoryAxis)}
     * when they are. Must return the string to display.
     */
    public IsFunction getLabelFunction() {
        return labelFunction;
    }

    /** @see #getLabelFunction() */
    public void setLabelFunction(IsFunction labelFunction) {
        this.labelFunction = labelFunction;
    }

    /**
     * When false, the start of longer periods neither switches to a different
     * date format nor renders in bold.
     */
    public Boolean isMarkPeriodChange() {
        return markPeriodChange;
    }

    /** @see #isMarkPeriodChange() */
    public void setMarkPeriodChange(Boolean markPeriodChange) {
        this.markPeriodChange = markPeriodChange;
    }

    /**
     * Shortest period present in the data; set only when parseDates is true.
     * Period codes: fff milliseconds, ss seconds, mm minutes, hh hours,
     * DD days, MM months, YYYY years. An increment number may be prefixed,
     * e.g. "15mm" for 15-minute data.
     */
    public String getMinPeriod() {
        return minPeriod;
    }

    /** @see #getMinPeriod() */
    public void setMinPeriod(String minPeriod) {
        this.minPeriod = minPeriod;
    }

    /**
     * Set to true when category values are Date objects; the chart then parses
     * dates and places points at irregular intervals. Combine with
     * equalSpacing=true to keep parsed dates but equal spacing.
     */
    public Boolean isParseDates() {
        return parseDates;
    }

    /** @see #isParseDates() */
    public void setParseDates(Boolean parseDates) {
        this.parseDates = parseDates;
    }

    /**
     * Axis placement: "top", "bottom", "left" or "right". Defaults to "left"
     * for a vertical axis and "bottom" for a horizontal one.
     */
    public String getPosition() {
        return position;
    }

    /** @see #getPosition() */
    public void setPosition(String position) {
        this.position = position;
    }

    /**
     * Whether the graph starts on the axis; recommended false when displaying
     * columns. With parseDates=true this is always false unless
     * equalSpacing=true.
     */
    public Boolean isStartOnAxis() {
        return startOnAxis;
    }

    /** @see #isStartOnAxis() */
    public void setStartOnAxis(Boolean startOnAxis) {
        this.startOnAxis = startOnAxis;
    }

    /** Position of an axis tick. Works only with non-date-based data. */
    public String getTickPosition() {
        return tickPosition;
    }

    /** @see #getTickPosition() */
    public void setTickPosition(String tickPosition) {
        this.tickPosition = tickPosition;
    }

    /**
     * Effective only when parseDates is true and equalSpacing is false; when
     * true, positions where a bigger period changes show the date strings of
     * both the small and the big period on two rows.
     */
    public Boolean isTwoLineMode() {
        return twoLineMode;
    }

    /** @see #isTwoLineMode() */
    public void setTwoLineMode(Boolean twoLineMode) {
        this.twoLineMode = twoLineMode;
    }
}
|
package org.codefilarete.stalactite.sql.result;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.Supplier;
import org.codefilarete.tool.Reflections;
import org.codefilarete.tool.function.TriConsumer;
/**
 * An interface aimed at abstracting the way relations between 2 beans are filled: implementations should handle
 * one-to-one relationships as well as one-to-many relationships.
 * Since implementations are quite simple, they are provided through the "of" static methods of this interface.
 *
 * @param <E> bean type on which the relation must be applied
 * @param <I> relation input type
 *
 * @author Guillaume Mary
 * @see #of(BiConsumer, Function, Class)
 * @see #of(BiConsumer, Function, Supplier)
 * @see #of(BiConsumer)
 */
@FunctionalInterface
public interface BeanRelationFixer<E, I> {

	/**
	 * Main method that fills the relation.
	 *
	 * @param target the owner of the relation
	 * @param input the object to be written/added into the relation
	 */
	void apply(E target, I input);

	/**
	 * Shortcut to create a {@link BeanRelationFixer} for a one-to-one relation.
	 *
	 * @param setter the method that fixes the relation
	 * @return a {@link BeanRelationFixer} mapped to {@link BiConsumer#accept(Object, Object)}
	 */
	static <E, I> BeanRelationFixer<E, I> of(BiConsumer<E, I> setter) {
		return of(setter, (a, b) -> { /* no bi-directional relation, nothing to do */ });
	}

	/**
	 * Shortcut to create a {@link BeanRelationFixer} for a bidirectional one-to-one relation.
	 *
	 * @param setter the method that fixes the relation
	 * @param reverseSetter the setter for the other side of the relation
	 * @return a {@link BeanRelationFixer} mapped to {@link BiConsumer#accept(Object, Object)}
	 */
	static <E, I> BeanRelationFixer<E, I> of(BiConsumer<E, I> setter, BiConsumer<I, E> reverseSetter) {
		return (s, i) -> {
			setter.accept(s, i);
			// bidirectional assignment
			reverseSetter.accept(i, s);
		};
	}

	/**
	 * Returns a {@link Supplier} of a concrete instance for the given collection type: for the {@link List} and {@link Set} interfaces it
	 * respectively returns an {@link ArrayList} and a {@link HashSet} instance; for any other case collectionType is expected to be concrete,
	 * therefore an attempt is made to instantiate it.
	 *
	 * @param collectionType expected to be List.class, Set.class, or a concrete type
	 * @return a {@link Supplier} of a concrete {@link Collection} compatible with the given collectionType
	 */
	static <C extends Collection<?>> Supplier<C> giveCollectionFactory(Class<C> collectionType) {
		Class<? extends C> concreteType;
		if (List.class.equals(collectionType)) {
			concreteType = (Class) ArrayList.class;
		} else if (Set.class.equals(collectionType)) {
			concreteType = (Class) HashSet.class;
		} else {
			// given type is expected to be concrete, we'll instantiate it
			concreteType = collectionType;
		}
		return () -> Reflections.newInstance(concreteType);
	}

	/**
	 * Shortcut to {@link #of(BiConsumer, Function, Supplier)} with a supplier that will instantiate the given concrete Collection class.
	 *
	 * @param setter the method that sets the {@link Collection} onto the target bean
	 * @param getter the method that gets the {@link Collection} from the target bean
	 * @param concreteCollectionType the Class that will be instantiated to fill the relation if it is null
	 * @return a {@link BeanRelationFixer} that will add the input to the Collection, creating it if the getter returns null
	 */
	static <E, I, C extends Collection<I>> BeanRelationFixer<E, I> of(BiConsumer<E, C> setter, Function<E, C> getter,
																	  Class<? extends C> concreteCollectionType) {
		return of(setter, getter, () -> Reflections.newInstance(concreteCollectionType));
	}

	/**
	 * Shortcut to create a {@link BeanRelationFixer} for a one-to-many relation where the attribute is a {@link Collection}.
	 *
	 * @param setter the method that sets the {@link Collection} onto the target bean
	 * @param getter the method that gets the {@link Collection} from the target bean
	 * @param collectionFactory a supplier of an instance to fill the relation if it is null
	 * @return a {@link BeanRelationFixer} that will add the input to the Collection, creating it if the getter returns null
	 */
	static <E, I, C extends Collection<I>> BeanRelationFixer<E, I> of(BiConsumer<E, C> setter, Function<E, C> getter, Supplier<C> collectionFactory) {
		return of(setter, getter, collectionFactory, (a, b) -> { /* no bi-directional relation, nothing to do */ });
	}

	/**
	 * Shortcut to {@link #of(BiConsumer, Function, Supplier, BiConsumer)} with a supplier that will instantiate the given concrete Collection class.
	 *
	 * @param setter the method that sets the {@link Collection} onto the target bean
	 * @param getter the method that gets the {@link Collection} from the target bean
	 * @param concreteCollectionType the Class that will be instantiated to fill the relation if it is null
	 * @param reverseSetter the setter for the other side of the relation
	 * @return a {@link BeanRelationFixer} that will add the input to the Collection, creating it if the getter returns null
	 */
	static <E, I, C extends Collection<I>> BeanRelationFixer<E, I> of(BiConsumer<E, C> setter, Function<E, C> getter,
																	  Class<? extends C> concreteCollectionType,
																	  BiConsumer<I, E> reverseSetter) {
		return of(setter, getter, () -> Reflections.newInstance(concreteCollectionType), reverseSetter);
	}

	/**
	 * Shortcut to create a {@link BeanRelationFixer} for a bidirectional relation where the attribute is a {@link Collection}.
	 *
	 * @param setter the method that sets the {@link Collection} onto the target bean
	 * @param getter the method that gets the {@link Collection} from the target bean
	 * @param collectionFactory a supplier of an instance to fill the relation if it is null
	 * @param reverseSetter the setter for the other side of the relation
	 * @return a {@link BeanRelationFixer} that will add the input to the Collection, creating it if the getter returns null
	 */
	static <E, I, C extends Collection<I>> BeanRelationFixer<E, I> of(BiConsumer<E, C> setter, Function<E, C> getter, Supplier<C> collectionFactory,
																	  BiConsumer<I, E> reverseSetter) {
		return ofAdapter(setter, getter, collectionFactory, (target, input, collection) -> {
			collection.add(input);
			// bidirectional assignment
			reverseSetter.accept(input, target);
		});
	}

	/**
	 * Shortcut to create a {@link BeanRelationFixer} for a relation where the attribute is a {@link Collection}.
	 *
	 * @param setter the method that sets the {@link Collection} onto the target bean
	 * @param getter the method that gets the {@link Collection} from the target bean
	 * @param collectionFactory a supplier of an instance to fill the relation if it is null
	 * @param adapter the final method applied to bean, input and collection; expected to at least add to the collection, with eventual input adaptation
	 * @return a {@link BeanRelationFixer} that will add the input to the Collection, creating it if the getter returns null
	 */
	static <E, I, C extends Collection<?>> BeanRelationFixer<E, I> ofAdapter(BiConsumer<E, C> setter, Function<E, C> getter, Supplier<C> collectionFactory,
																			 TriConsumer<E, I, C> adapter) {
		return (target, input) -> {
			C collection = getter.apply(target);
			if (collection == null) {
				// we fill the relation
				collection = collectionFactory.get();
				setter.accept(target, collection);
			}
			adapter.accept(target, input, collection);
		};
	}
}
|
package org.usfirst.frc4388.utility;
/**
* Interface for loops, which are routine that run periodically in the robot code (such as periodic gyroscope
* calibration, etc.)
*/
/**
 * A routine that the robot code runs periodically (for example, periodic
 * gyroscope calibration). Implementations receive lifecycle callbacks with the
 * current timestamp in seconds.
 */
public interface Loop {

	/** Called once when the loop is started. */
	void onStart(double timestamp);

	/** Called on every periodic iteration while the loop is running. */
	void onLoop(double timestamp);

	/** Called once when the loop is stopped. */
	void onStop(double timestamp);
}
|
/*
* Copyright (C) 2016-2019 crDroid Android Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bianca.support.preferences;
import android.content.Context;
import android.util.AttributeSet;
/**
 * A {@link CustomSeekBarPreference} whose value is persisted through a
 * {@link SecureSettingsStore} (i.e. Settings.Secure) instead of the default
 * preference storage. All three standard preference constructors install the
 * secure data store immediately after construction.
 */
public class SecureSettingSeekBarPreference extends CustomSeekBarPreference {

    public SecureSettingSeekBarPreference(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        installSecureStore(context);
    }

    public SecureSettingSeekBarPreference(Context context, AttributeSet attrs) {
        super(context, attrs);
        installSecureStore(context);
    }

    public SecureSettingSeekBarPreference(Context context) {
        // Same effect as calling super(context, null) and installing the store.
        this(context, null);
    }

    /** Routes persistence through a Settings.Secure-backed data store. */
    private void installSecureStore(Context context) {
        setPreferenceDataStore(new SecureSettingsStore(context.getContentResolver()));
    }
}
|
import org.checkerframework.checker.testchecker.wholeprograminference.qual.Sibling2;
import org.checkerframework.checker.testchecker.wholeprograminference.qual.Top;
import org.checkerframework.checker.testchecker.wholeprograminference.qual.WholeProgramInferenceBottom;
/**
 * Checker Framework whole-program-inference (WPI) test fixture: exercises
 * inference of field annotations written from inside an anonymous class body.
 * NOTE(review): the "// :: warning:" comment below encodes an expected
 * diagnostic for the test harness — do not edit, move, or duplicate it.
 */
public class UsesAnonymous {
    void method() {
        // The anonymous subclass assigns getSibling2() (declared @Sibling2)
        // into the outer Anonymous fields and into its own innerField, which
        // is what WPI is expected to infer from.
        Anonymous a =
                new Anonymous() {
                    int innerField;

                    public void method2() {
                        Anonymous.field1 = getSibling2();
                        Anonymous.field2 = getSibling2();
                        innerField = getSibling2();
                    }

                    void innerFieldTest() {
                        // :: warning: (argument)
                        expectsSibling2(innerField);
                    }

                    @WholeProgramInferenceBottom int getBottom() {
                        return (@WholeProgramInferenceBottom int) 0;
                    }

                    @Top int getTop() {
                        return (@Top int) 0;
                    }

                    @Sibling2 int getSibling2() {
                        return (@Sibling2 int) 0;
                    }

                    void expectsSibling2(@Sibling2 int t) {}
                };
    }
}
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
end_comment
begin_package
DECL|package|org.apache.hadoop.yarn.nodelabels
package|package
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|yarn
operator|.
name|nodelabels
package|;
end_package
begin_import
import|import static
name|org
operator|.
name|junit
operator|.
name|Assert
operator|.
name|assertTrue
import|;
end_import
begin_import
import|import
name|java
operator|.
name|io
operator|.
name|IOException
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Arrays
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Collection
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|HashSet
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Map
import|;
end_import
begin_import
import|import
name|java
operator|.
name|util
operator|.
name|Set
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|commons
operator|.
name|lang3
operator|.
name|StringUtils
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|conf
operator|.
name|Configuration
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|yarn
operator|.
name|api
operator|.
name|records
operator|.
name|NodeId
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|yarn
operator|.
name|api
operator|.
name|records
operator|.
name|NodeLabel
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|hadoop
operator|.
name|yarn
operator|.
name|conf
operator|.
name|YarnConfiguration
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|After
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Assert
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Before
import|;
end_import
begin_import
import|import
name|org
operator|.
name|junit
operator|.
name|Test
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|collect
operator|.
name|ImmutableMap
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|collect
operator|.
name|ImmutableSet
import|;
end_import
begin_import
import|import
name|com
operator|.
name|google
operator|.
name|common
operator|.
name|collect
operator|.
name|Sets
import|;
end_import
begin_class
DECL|class|TestCommonNodeLabelsManager
specifier|public
class|class
name|TestCommonNodeLabelsManager
extends|extends
name|NodeLabelTestBase
block|{
DECL|field|mgr
name|DummyCommonNodeLabelsManager
name|mgr
init|=
literal|null
decl_stmt|;
annotation|@
name|Before
DECL|method|before ()
specifier|public
name|void
name|before
parameter_list|()
block|{
name|mgr
operator|=
operator|new
name|DummyCommonNodeLabelsManager
argument_list|()
expr_stmt|;
name|Configuration
name|conf
init|=
operator|new
name|YarnConfiguration
argument_list|()
decl_stmt|;
name|conf
operator|.
name|setBoolean
argument_list|(
name|YarnConfiguration
operator|.
name|NODE_LABELS_ENABLED
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|init
argument_list|(
name|conf
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|start
argument_list|()
expr_stmt|;
block|}
annotation|@
name|After
DECL|method|after ()
specifier|public
name|void
name|after
parameter_list|()
block|{
name|mgr
operator|.
name|stop
argument_list|()
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testAddRemovelabel ()
specifier|public
name|void
name|testAddRemovelabel
parameter_list|()
throws|throws
name|Exception
block|{
comment|// Add some label
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"hello"
argument_list|)
argument_list|)
expr_stmt|;
name|verifyNodeLabelAdded
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"hello"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastAddedlabels
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"world"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|)
argument_list|)
expr_stmt|;
name|verifyNodeLabelAdded
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastAddedlabels
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|containsAll
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"hello"
argument_list|,
literal|"world"
argument_list|,
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"hello1"
argument_list|,
literal|false
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|fail
argument_list|(
literal|"IOException not thrown on exclusivity change of labels"
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|Exception
name|e
parameter_list|)
block|{
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"IOException is expected when exclusivity is modified"
argument_list|,
name|e
operator|instanceof
name|IOException
argument_list|)
expr_stmt|;
block|}
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"hello1"
argument_list|,
literal|true
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|Exception
name|e
parameter_list|)
block|{
name|Assert
operator|.
name|assertFalse
argument_list|(
literal|"IOException not expected when no change in exclusivity"
argument_list|,
name|e
operator|instanceof
name|IOException
argument_list|)
expr_stmt|;
block|}
comment|// try to remove null, empty and non-existed label, should fail
for|for
control|(
name|String
name|p
range|:
name|Arrays
operator|.
name|asList
argument_list|(
literal|null
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|NO_LABEL
argument_list|,
literal|"xx"
argument_list|)
control|)
block|{
name|boolean
name|caught
init|=
literal|false
decl_stmt|;
try|try
block|{
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|p
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"remove label should fail "
operator|+
literal|"when label is null/empty/non-existed"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
block|}
comment|// Remove some label
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"hello"
argument_list|)
argument_list|)
expr_stmt|;
name|assertCollectionEquals
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"hello"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastRemovedlabels
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|containsAll
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"world"
argument_list|,
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|,
literal|"world"
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|lastRemovedlabels
operator|.
name|containsAll
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"hello1"
argument_list|,
literal|"world1"
argument_list|,
literal|"world"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testAddlabelWithCase ()
specifier|public
name|void
name|testAddlabelWithCase
parameter_list|()
throws|throws
name|Exception
block|{
comment|// Add some label, case will not ignore here
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"HeLlO"
argument_list|)
argument_list|)
expr_stmt|;
name|verifyNodeLabelAdded
argument_list|(
name|Sets
operator|.
name|newHashSet
argument_list|(
literal|"HeLlO"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastAddedlabels
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertFalse
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|containsAll
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"hello"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testAddlabelWithExclusivity ()
specifier|public
name|void
name|testAddlabelWithExclusivity
parameter_list|()
throws|throws
name|Exception
block|{
comment|// Add some label, case will not ignore here
name|mgr
operator|.
name|addToCluserNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"a"
argument_list|,
literal|false
argument_list|)
argument_list|,
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"b"
argument_list|,
literal|true
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertFalse
argument_list|(
name|mgr
operator|.
name|isExclusiveNodeLabel
argument_list|(
literal|"a"
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|isExclusiveNodeLabel
argument_list|(
literal|"b"
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testAddInvalidlabel ()
specifier|public
name|void
name|testAddInvalidlabel
parameter_list|()
throws|throws
name|IOException
block|{
name|boolean
name|caught
init|=
literal|false
decl_stmt|;
try|try
block|{
name|Set
argument_list|<
name|String
argument_list|>
name|set
init|=
operator|new
name|HashSet
argument_list|<
name|String
argument_list|>
argument_list|()
decl_stmt|;
name|set
operator|.
name|add
argument_list|(
literal|null
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|set
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"null label should not add to repo"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
name|CommonNodeLabelsManager
operator|.
name|NO_LABEL
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"empty label should not add to repo"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"-?"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"invalid label character should not add to repo"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
name|StringUtils
operator|.
name|repeat
argument_list|(
literal|"c"
argument_list|,
literal|257
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"too long label should not add to repo"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"-aaabbb"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"label cannot start with \"-\""
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"_aaabbb"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"label cannot start with \"_\""
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"a^aabbb"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"label cannot contains other chars like ^[] ..."
argument_list|,
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"aa[a]bbb"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"label cannot contains other chars like ^[] ..."
argument_list|,
name|caught
argument_list|)
expr_stmt|;
block|}
annotation|@
name|SuppressWarnings
argument_list|(
block|{
literal|"unchecked"
block|,
literal|"rawtypes"
block|}
argument_list|)
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testAddReplaceRemoveLabelsOnNodes ()
specifier|public
name|void
name|testAddReplaceRemoveLabelsOnNodes
parameter_list|()
throws|throws
name|Exception
block|{
comment|// set a label on a node, but label doesn't exist
name|boolean
name|caught
init|=
literal|false
decl_stmt|;
try|try
block|{
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"node"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"label"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"trying to set a label to a node but "
operator|+
literal|"label doesn't exist in repository should fail"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
comment|// set a label on a node, but node is null or empty
try|try
block|{
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
name|CommonNodeLabelsManager
operator|.
name|NO_LABEL
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"label"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|caught
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"trying to add a empty node but succeeded"
argument_list|,
name|caught
argument_list|)
expr_stmt|;
comment|// set node->label one by one
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|lastNodeToLabels
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// set bunch of node->label
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
operator|(
name|Map
operator|)
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|lastNodeToLabels
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|/* * n1: p1 * n2: p3 * n3: p3 */
comment|// remove label on node
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|lastNodeToLabels
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|)
argument_list|)
expr_stmt|;
comment|// add label on node
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|lastNodeToLabels
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// remove labels on node
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertEquals
argument_list|(
literal|0
argument_list|,
name|mgr
operator|.
name|getNodeLabels
argument_list|()
operator|.
name|size
argument_list|()
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|lastNodeToLabels
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testRemovelabelWithNodes ()
specifier|public
name|void
name|testRemovelabelWithNodes
parameter_list|()
throws|throws
name|Exception
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertCollectionEquals
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastRemovedlabels
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
name|assertCollectionEquals
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|,
name|mgr
operator|.
name|lastRemovedlabels
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testTrimLabelsWhenAddRemoveNodeLabels ()
specifier|public
name|void
name|testTrimLabelsWhenAddRemoveNodeLabels
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|" p1"
argument_list|)
argument_list|)
expr_stmt|;
name|assertCollectionEquals
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|toSet
argument_list|(
literal|"p1 "
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getClusterNodeLabelNames
argument_list|()
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testTrimLabelsWhenModifyLabelsOnNodes ()
specifier|public
name|void
name|testTrimLabelsWhenModifyLabelsOnNodes
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|" p1"
argument_list|,
literal|"p2"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1 "
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|" p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|" p2 "
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testReplaceLabelsOnHostsShouldUpdateNodesBelongTo ()
specifier|public
name|void
name|testReplaceLabelsOnHostsShouldUpdateNodesBelongTo
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// Replace labels on n1:1 to P2
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// Replace labels on n1 to P1, both n1:1/n1 will be P1 now
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// Set labels on n1:1 to P2 again to verify if add/remove works
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
DECL|method|assertNodeLabelsDisabledErrorMessage (IOException e)
specifier|private
name|void
name|assertNodeLabelsDisabledErrorMessage
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|Assert
operator|.
name|assertEquals
argument_list|(
name|CommonNodeLabelsManager
operator|.
name|NODE_LABELS_NOT_ENABLED_ERR
argument_list|,
name|e
operator|.
name|getMessage
argument_list|()
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testNodeLabelsDisabled ()
specifier|public
name|void
name|testNodeLabelsDisabled
parameter_list|()
throws|throws
name|IOException
block|{
name|DummyCommonNodeLabelsManager
name|mgr
init|=
operator|new
name|DummyCommonNodeLabelsManager
argument_list|()
decl_stmt|;
name|Configuration
name|conf
init|=
operator|new
name|YarnConfiguration
argument_list|()
decl_stmt|;
name|conf
operator|.
name|setBoolean
argument_list|(
name|YarnConfiguration
operator|.
name|NODE_LABELS_ENABLED
argument_list|,
literal|false
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|init
argument_list|(
name|conf
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|start
argument_list|()
expr_stmt|;
name|boolean
name|caught
init|=
literal|false
decl_stmt|;
comment|// add labels
try|try
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"x"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|assertNodeLabelsDisabledErrorMessage
argument_list|(
name|e
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|true
expr_stmt|;
block|}
comment|// check exception caught
name|Assert
operator|.
name|assertTrue
argument_list|(
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
comment|// remove labels
try|try
block|{
name|mgr
operator|.
name|removeFromClusterNodeLabels
argument_list|(
name|ImmutableSet
operator|.
name|of
argument_list|(
literal|"x"
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|assertNodeLabelsDisabledErrorMessage
argument_list|(
name|e
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|true
expr_stmt|;
block|}
comment|// check exception caught
name|Assert
operator|.
name|assertTrue
argument_list|(
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
comment|// add labels to node
try|try
block|{
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|NodeId
operator|.
name|newInstance
argument_list|(
literal|"host"
argument_list|,
literal|0
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|assertNodeLabelsDisabledErrorMessage
argument_list|(
name|e
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|true
expr_stmt|;
block|}
comment|// check exception caught
name|Assert
operator|.
name|assertTrue
argument_list|(
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
comment|// remove labels from node
try|try
block|{
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|NodeId
operator|.
name|newInstance
argument_list|(
literal|"host"
argument_list|,
literal|0
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|assertNodeLabelsDisabledErrorMessage
argument_list|(
name|e
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|true
expr_stmt|;
block|}
comment|// check exception caught
name|Assert
operator|.
name|assertTrue
argument_list|(
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
comment|// replace labels on node
try|try
block|{
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|NodeId
operator|.
name|newInstance
argument_list|(
literal|"host"
argument_list|,
literal|0
argument_list|)
argument_list|,
name|CommonNodeLabelsManager
operator|.
name|EMPTY_STRING_SET
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|assertNodeLabelsDisabledErrorMessage
argument_list|(
name|e
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|true
expr_stmt|;
block|}
comment|// check exception caught
name|Assert
operator|.
name|assertTrue
argument_list|(
name|caught
argument_list|)
expr_stmt|;
name|caught
operator|=
literal|false
expr_stmt|;
name|mgr
operator|.
name|close
argument_list|()
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testLabelsToNodes ()
specifier|public
name|void
name|testLabelsToNodes
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Map
argument_list|<
name|String
argument_list|,
name|Set
argument_list|<
name|NodeId
argument_list|>
argument_list|>
name|labelsToNodes
init|=
name|mgr
operator|.
name|getLabelsToNodes
argument_list|()
decl_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|transposeNodeToLabels
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
comment|// Replace labels on n1:1 and n1:2 with p2
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|labelsToNodes
operator|=
name|mgr
operator|.
name|getLabelsToNodes
argument_list|()
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|)
argument_list|,
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|transposeNodeToLabels
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
comment|// Replace labels on n1 to P1, both n1:1/n1 will be P1 now
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|labelsToNodes
operator|=
name|mgr
operator|.
name|getLabelsToNodes
argument_list|()
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|transposeNodeToLabels
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
comment|// Set labels on n1:1 to P2 again to verify if add/remove works
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// Add p3 to n2; the labels-to-nodes map should now also map p3 to n2
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|labelsToNodes
operator|=
name|mgr
operator|.
name|getLabelsToNodes
argument_list|()
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|,
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|)
argument_list|,
literal|"p3"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|transposeNodeToLabels
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
comment|// Remove p3 from n2; only the p1 and p2 mappings should remain
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|labelsToNodes
operator|=
name|mgr
operator|.
name|getLabelsToNodes
argument_list|()
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|,
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|transposeNodeToLabels
argument_list|(
name|mgr
operator|.
name|getNodeLabels
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testLabelsToNodesForSelectedLabels ()
specifier|public
name|void
name|testLabelsToNodesForSelectedLabels
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Set
argument_list|<
name|String
argument_list|>
name|setlabels
init|=
operator|new
name|HashSet
argument_list|<
name|String
argument_list|>
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
operator|new
name|String
index|[]
block|{
literal|"p1"
block|}
argument_list|)
argument_list|)
decl_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// Replace labels on host n1 with p3; n1, n1:1 and n1:2 all become p3
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertTrue
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
operator|.
name|isEmpty
argument_list|()
argument_list|)
expr_stmt|;
name|setlabels
operator|=
operator|new
name|HashSet
argument_list|<
name|String
argument_list|>
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
operator|new
name|String
index|[]
block|{
literal|"p2"
block|,
literal|"p3"
block|}
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p3"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|)
argument_list|,
literal|"p3"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:1"
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n1:2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|removeLabelsFromNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|setlabels
operator|=
operator|new
name|HashSet
argument_list|<
name|String
argument_list|>
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
operator|new
name|String
index|[]
block|{
literal|"p1"
block|,
literal|"p2"
block|,
literal|"p3"
block|}
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|)
argument_list|,
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2:2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|)
argument_list|,
literal|"p2"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|)
argument_list|,
literal|"p3"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n2:2"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|setlabels
operator|=
operator|new
name|HashSet
argument_list|<
name|String
argument_list|>
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
operator|new
name|String
index|[]
block|{
literal|"p1"
block|}
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelsToNodesEquals
argument_list|(
name|mgr
operator|.
name|getLabelsToNodes
argument_list|(
name|setlabels
argument_list|)
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
literal|"p1"
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n3"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testNoMoreThanOneLabelExistedInOneHost ()
specifier|public
name|void
name|testNoMoreThanOneLabelExistedInOneHost
parameter_list|()
throws|throws
name|IOException
block|{
name|boolean
name|failed
init|=
literal|false
decl_stmt|;
comment|// As in YARN-2694, we temporarily disable no more than one label existed in
comment|// one host
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|failed
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"Should failed when set> 1 labels on a host"
argument_list|,
name|failed
argument_list|)
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|failed
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"Should failed when add> 1 labels on a host"
argument_list|,
name|failed
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
comment|// add a same label to a node, #labels in this node is still 1, shouldn't
comment|// fail
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
try|try
block|{
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
catch|catch
parameter_list|(
name|IOException
name|e
parameter_list|)
block|{
name|failed
operator|=
literal|true
expr_stmt|;
block|}
name|Assert
operator|.
name|assertTrue
argument_list|(
literal|"Should failed when #labels> 1 on a host after add"
argument_list|,
name|failed
argument_list|)
expr_stmt|;
block|}
DECL|method|verifyNodeLabelAdded (Set<String> expectedAddedLabelNames, Collection<NodeLabel> addedNodeLabels)
specifier|private
name|void
name|verifyNodeLabelAdded
parameter_list|(
name|Set
argument_list|<
name|String
argument_list|>
name|expectedAddedLabelNames
parameter_list|,
name|Collection
argument_list|<
name|NodeLabel
argument_list|>
name|addedNodeLabels
parameter_list|)
block|{
name|Assert
operator|.
name|assertEquals
argument_list|(
name|expectedAddedLabelNames
operator|.
name|size
argument_list|()
argument_list|,
name|addedNodeLabels
operator|.
name|size
argument_list|()
argument_list|)
expr_stmt|;
for|for
control|(
name|NodeLabel
name|label
range|:
name|addedNodeLabels
control|)
block|{
name|Assert
operator|.
name|assertTrue
argument_list|(
name|expectedAddedLabelNames
operator|.
name|contains
argument_list|(
name|label
operator|.
name|getName
argument_list|()
argument_list|)
argument_list|)
expr_stmt|;
block|}
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testReplaceLabelsOnNodeInDistributedMode ()
specifier|public
name|void
name|testReplaceLabelsOnNodeInDistributedMode
parameter_list|()
throws|throws
name|Exception
block|{
comment|//create a new DummyCommonNodeLabelsManager distinct from the one created in @Before
name|mgr
operator|.
name|stop
argument_list|()
expr_stmt|;
name|mgr
operator|=
operator|new
name|DummyCommonNodeLabelsManager
argument_list|()
expr_stmt|;
name|Configuration
name|conf
init|=
operator|new
name|YarnConfiguration
argument_list|()
decl_stmt|;
name|conf
operator|.
name|setBoolean
argument_list|(
name|YarnConfiguration
operator|.
name|NODE_LABELS_ENABLED
argument_list|,
literal|true
argument_list|)
expr_stmt|;
name|conf
operator|.
name|set
argument_list|(
name|YarnConfiguration
operator|.
name|NODELABEL_CONFIGURATION_TYPE
argument_list|,
name|YarnConfiguration
operator|.
name|DISTRIBUTED_NODELABEL_CONFIGURATION_TYPE
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|init
argument_list|(
name|conf
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|start
argument_list|()
expr_stmt|;
name|mgr
operator|.
name|addToCluserNodeLabelsWithDefaultExclusivity
argument_list|(
name|toSet
argument_list|(
literal|"p1"
argument_list|,
literal|"p2"
argument_list|,
literal|"p3"
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|replaceLabelsOnNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Set
argument_list|<
name|String
argument_list|>
name|labelsByNode
init|=
name|mgr
operator|.
name|getLabelsByNode
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|)
decl_stmt|;
name|Assert
operator|.
name|assertNull
argument_list|(
literal|"Labels are not expected to be written to the NodeLabelStore"
argument_list|,
name|mgr
operator|.
name|lastNodeToLabels
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertNotNull
argument_list|(
literal|"Updated labels should be available from the Mgr"
argument_list|,
name|labelsByNode
argument_list|)
expr_stmt|;
name|Assert
operator|.
name|assertTrue
argument_list|(
name|labelsByNode
operator|.
name|contains
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testLabelsInfoToNodes ()
specifier|public
name|void
name|testLabelsInfoToNodes
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p1"
argument_list|,
literal|false
argument_list|)
argument_list|,
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p2"
argument_list|,
literal|true
argument_list|)
argument_list|,
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p3"
argument_list|,
literal|true
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p1"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|Map
argument_list|<
name|NodeLabel
argument_list|,
name|Set
argument_list|<
name|NodeId
argument_list|>
argument_list|>
name|labelsToNodes
init|=
name|mgr
operator|.
name|getLabelsInfoToNodes
argument_list|()
decl_stmt|;
name|assertLabelsInfoToNodesEquals
argument_list|(
name|labelsToNodes
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p1"
argument_list|,
literal|false
argument_list|)
argument_list|,
name|toSet
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
annotation|@
name|Test
argument_list|(
name|timeout
operator|=
literal|5000
argument_list|)
DECL|method|testGetNodeLabelsInfo ()
specifier|public
name|void
name|testGetNodeLabelsInfo
parameter_list|()
throws|throws
name|IOException
block|{
name|mgr
operator|.
name|addToCluserNodeLabels
argument_list|(
name|Arrays
operator|.
name|asList
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p1"
argument_list|,
literal|false
argument_list|)
argument_list|,
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p2"
argument_list|,
literal|true
argument_list|)
argument_list|,
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p3"
argument_list|,
literal|false
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p2"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|mgr
operator|.
name|addLabelsToNode
argument_list|(
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
literal|"p3"
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
name|assertLabelInfoMapEquals
argument_list|(
name|mgr
operator|.
name|getNodeLabelsInfo
argument_list|()
argument_list|,
name|ImmutableMap
operator|.
name|of
argument_list|(
name|toNodeId
argument_list|(
literal|"n1"
argument_list|)
argument_list|,
name|toSet
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p2"
argument_list|,
literal|true
argument_list|)
argument_list|)
argument_list|,
name|toNodeId
argument_list|(
literal|"n2"
argument_list|)
argument_list|,
name|toSet
argument_list|(
name|NodeLabel
operator|.
name|newInstance
argument_list|(
literal|"p3"
argument_list|,
literal|false
argument_list|)
argument_list|)
argument_list|)
argument_list|)
expr_stmt|;
block|}
block|}
end_class
end_unit
|
package org.firstinspires.ftc.teamcode.unitTests.dummy;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.IntentSender;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.database.DatabaseErrorHandler;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.drawable.Drawable;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.UserHandle;
import android.view.Display;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class DummyContext extends Context {
@Override
public AssetManager getAssets() {
// No-op stub: this dummy Context supplies no assets; callers must tolerate null.
return null;
}
@Override
public Resources getResources() {
// No-op stub: no Resources are available under unit test.
return null;
}
@Override
public PackageManager getPackageManager() {
// No-op stub: package management is not simulated.
return null;
}
@Override
public ContentResolver getContentResolver() {
// No-op stub: content providers are not simulated.
return null;
}
@Override
public Looper getMainLooper() {
// No-op stub: there is no Android main thread in unit tests.
return null;
}
@Override
public Context getApplicationContext() {
// No-op stub: no application-level context exists; returns null rather than this.
return null;
}
@Override
public void setTheme(int resid) {
// No-op stub: theme changes are silently ignored.
}
@Override
public Resources.Theme getTheme() {
// No-op stub: no theme is available.
return null;
}
@Override
public ClassLoader getClassLoader() {
// No-op stub: returns null instead of a real class loader.
return null;
}
@Override
public String getPackageName() {
// No-op stub: no package identity is simulated.
return null;
}
@Override
public ApplicationInfo getApplicationInfo() {
// No-op stub: application metadata is not simulated.
return null;
}
@Override
public String getPackageResourcePath() {
// No-op stub: no APK resource path exists in a unit-test environment.
return null;
}
@Override
public String getPackageCodePath() {
// No-op stub: no APK code path exists in a unit-test environment.
return null;
}
@Override
public SharedPreferences getSharedPreferences(String name, int mode) {
// No-op stub: preference storage is not simulated; both arguments are ignored.
return null;
}
@Override
public boolean moveSharedPreferencesFrom(Context sourceContext, String name) {
// No-op stub: always reports failure; nothing is moved.
return false;
}
@Override
public boolean deleteSharedPreferences(String name) {
// No-op stub: always reports failure; nothing is deleted.
return false;
}
@Override
public FileInputStream openFileInput(String name) throws FileNotFoundException {
return null;
}
@Override
public FileOutputStream openFileOutput(String name, int mode) throws FileNotFoundException {
return null;
}
@Override
public boolean deleteFile(String name) {
return false;
}
@Override
public File getFileStreamPath(String name) {
return null;
}
@Override
public File getDataDir() {
return null;
}
@Override
public File getFilesDir() {
return null;
}
@Override
public File getNoBackupFilesDir() {
return null;
}
@Nullable
@Override
public File getExternalFilesDir(@Nullable String type) {
return null;
}
@Override
public File[] getExternalFilesDirs(String type) {
return new File[0];
}
@Override
public File getObbDir() {
return null;
}
@Override
public File[] getObbDirs() {
return new File[0];
}
@Override
public File getCacheDir() {
return null;
}
@Override
public File getCodeCacheDir() {
return null;
}
@Nullable
@Override
public File getExternalCacheDir() {
return null;
}
@Override
public File[] getExternalCacheDirs() {
return new File[0];
}
@Override
public File[] getExternalMediaDirs() {
return new File[0];
}
@Override
public String[] fileList() {
return new String[0];
}
@Override
public File getDir(String name, int mode) {
return null;
}
@Override
public SQLiteDatabase openOrCreateDatabase(String name, int mode, SQLiteDatabase.CursorFactory factory) {
return null;
}
@Override
public SQLiteDatabase openOrCreateDatabase(String name, int mode, SQLiteDatabase.CursorFactory factory, @Nullable DatabaseErrorHandler errorHandler) {
return null;
}
@Override
public boolean moveDatabaseFrom(Context sourceContext, String name) {
return false;
}
@Override
public boolean deleteDatabase(String name) {
return false;
}
@Override
public File getDatabasePath(String name) {
return null;
}
@Override
public String[] databaseList() {
return new String[0];
}
@Override
public Drawable getWallpaper() {
return null;
}
@Override
public Drawable peekWallpaper() {
return null;
}
@Override
public int getWallpaperDesiredMinimumWidth() {
return 0;
}
@Override
public int getWallpaperDesiredMinimumHeight() {
return 0;
}
@Override
public void setWallpaper(Bitmap bitmap) throws IOException {
}
@Override
public void setWallpaper(InputStream data) throws IOException {
}
@Override
public void clearWallpaper() throws IOException {
}
@Override
public void startActivity(Intent intent) {
}
@Override
public void startActivity(Intent intent, @Nullable Bundle options) {
}
@Override
public void startActivities(Intent[] intents) {
}
@Override
public void startActivities(Intent[] intents, Bundle options) {
}
@Override
public void startIntentSender(IntentSender intent, @Nullable Intent fillInIntent, int flagsMask, int flagsValues, int extraFlags) throws IntentSender.SendIntentException {
}
@Override
public void startIntentSender(IntentSender intent, @Nullable Intent fillInIntent, int flagsMask, int flagsValues, int extraFlags, @Nullable Bundle options) throws IntentSender.SendIntentException {
}
@Override
public void sendBroadcast(Intent intent) {
}
@Override
public void sendBroadcast(Intent intent, @Nullable String receiverPermission) {
}
@Override
public void sendOrderedBroadcast(Intent intent, @Nullable String receiverPermission) {
}
@Override
public void sendOrderedBroadcast(@NonNull Intent intent, @Nullable String receiverPermission, @Nullable BroadcastReceiver resultReceiver, @Nullable Handler scheduler, int initialCode, @Nullable String initialData, @Nullable Bundle initialExtras) {
}
@Override
public void sendBroadcastAsUser(Intent intent, UserHandle user) {
}
@Override
public void sendBroadcastAsUser(Intent intent, UserHandle user, @Nullable String receiverPermission) {
}
@Override
public void sendOrderedBroadcastAsUser(Intent intent, UserHandle user, @Nullable String receiverPermission, BroadcastReceiver resultReceiver, @Nullable Handler scheduler, int initialCode, @Nullable String initialData, @Nullable Bundle initialExtras) {
}
@Override
public void sendStickyBroadcast(Intent intent) {
}
@Override
public void sendStickyOrderedBroadcast(Intent intent, BroadcastReceiver resultReceiver, @Nullable Handler scheduler, int initialCode, @Nullable String initialData, @Nullable Bundle initialExtras) {
}
@Override
public void removeStickyBroadcast(Intent intent) {
}
@Override
public void sendStickyBroadcastAsUser(Intent intent, UserHandle user) {
}
@Override
public void sendStickyOrderedBroadcastAsUser(Intent intent, UserHandle user, BroadcastReceiver resultReceiver, @Nullable Handler scheduler, int initialCode, @Nullable String initialData, @Nullable Bundle initialExtras) {
}
@Override
public void removeStickyBroadcastAsUser(Intent intent, UserHandle user) {
}
@Nullable
@Override
public Intent registerReceiver(@Nullable BroadcastReceiver receiver, IntentFilter filter) {
return null;
}
@Nullable
@Override
public Intent registerReceiver(@Nullable BroadcastReceiver receiver, IntentFilter filter, int flags) {
return null;
}
@Nullable
@Override
public Intent registerReceiver(BroadcastReceiver receiver, IntentFilter filter, @Nullable String broadcastPermission, @Nullable Handler scheduler) {
return null;
}
@Nullable
@Override
public Intent registerReceiver(BroadcastReceiver receiver, IntentFilter filter, @Nullable String broadcastPermission, @Nullable Handler scheduler, int flags) {
return null;
}
@Override
public void unregisterReceiver(BroadcastReceiver receiver) {
}
@Nullable
@Override
public ComponentName startService(Intent service) {
return null;
}
@Nullable
@Override
public ComponentName startForegroundService(Intent service) {
return null;
}
@Override
public boolean stopService(Intent service) {
return false;
}
@Override
public boolean bindService(Intent service, @NonNull ServiceConnection conn, int flags) {
return false;
}
@Override
public void unbindService(@NonNull ServiceConnection conn) {
}
@Override
public boolean startInstrumentation(@NonNull ComponentName className, @Nullable String profileFile, @Nullable Bundle arguments) {
return false;
}
@Override
public Object getSystemService(@NonNull String name) {
return null;
}
@Nullable
@Override
public String getSystemServiceName(@NonNull Class<?> serviceClass) {
return null;
}
@Override
public int checkPermission(@NonNull String permission, int pid, int uid) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkCallingPermission(@NonNull String permission) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkCallingOrSelfPermission(@NonNull String permission) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkSelfPermission(@NonNull String permission) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public void enforcePermission(@NonNull String permission, int pid, int uid, @Nullable String message) {
}
@Override
public void enforceCallingPermission(@NonNull String permission, @Nullable String message) {
}
@Override
public void enforceCallingOrSelfPermission(@NonNull String permission, @Nullable String message) {
}
@Override
public void grantUriPermission(String toPackage, Uri uri, int modeFlags) {
}
@Override
public void revokeUriPermission(Uri uri, int modeFlags) {
}
@Override
public void revokeUriPermission(String toPackage, Uri uri, int modeFlags) {
}
@Override
public int checkUriPermission(Uri uri, int pid, int uid, int modeFlags) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkCallingUriPermission(Uri uri, int modeFlags) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkCallingOrSelfUriPermission(Uri uri, int modeFlags) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public int checkUriPermission(@Nullable Uri uri, @Nullable String readPermission, @Nullable String writePermission, int pid, int uid, int modeFlags) {
return PackageManager.PERMISSION_GRANTED;
}
@Override
public void enforceUriPermission(Uri uri, int pid, int uid, int modeFlags, String message) {
}
@Override
public void enforceCallingUriPermission(Uri uri, int modeFlags, String message) {
}
@Override
public void enforceCallingOrSelfUriPermission(Uri uri, int modeFlags, String message) {
}
@Override
public void enforceUriPermission(@Nullable Uri uri, @Nullable String readPermission, @Nullable String writePermission, int pid, int uid, int modeFlags, @Nullable String message) {
}
@Override
public Context createPackageContext(String packageName, int flags) throws PackageManager.NameNotFoundException {
return null;
}
@Override
public Context createContextForSplit(String splitName) throws PackageManager.NameNotFoundException {
return null;
}
@Override
public Context createConfigurationContext(@NonNull Configuration overrideConfiguration) {
return null;
}
@Override
public Context createDisplayContext(@NonNull Display display) {
return null;
}
@Override
public Context createDeviceProtectedStorageContext() {
return null;
}
@Override
public boolean isDeviceProtectedStorage() {
return false;
}
}
|
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.finspace.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * A request to list the tags attached to a FinSpace resource, identified by its ARN.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/finspace-2021-03-12/ListTagsForResource" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /** The Amazon Resource Name of the resource whose tags should be listed. */
    private String resourceArn;

    /**
     * Sets the Amazon Resource Name of the resource.
     *
     * @param resourceArn
     *        The Amazon Resource Name of the resource.
     */
    public void setResourceArn(String resourceArn) {
        this.resourceArn = resourceArn;
    }

    /**
     * Returns the Amazon Resource Name of the resource.
     *
     * @return The Amazon Resource Name of the resource.
     */
    public String getResourceArn() {
        return resourceArn;
    }

    /**
     * Fluent setter for the Amazon Resource Name of the resource.
     *
     * @param resourceArn
     *        The Amazon Resource Name of the resource.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForResourceRequest withResourceArn(String resourceArn) {
        setResourceArn(resourceArn);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getResourceArn() != null) {
            sb.append("ResourceArn: ").append(getResourceArn());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ListTagsForResourceRequest)) {
            return false;
        }
        ListTagsForResourceRequest other = (ListTagsForResourceRequest) obj;
        return java.util.Objects.equals(getResourceArn(), other.getResourceArn());
    }

    @Override
    public int hashCode() {
        // Equivalent to the generated 31 * 1 + (arn == null ? 0 : arn.hashCode()).
        return 31 + java.util.Objects.hashCode(getResourceArn());
    }

    @Override
    public ListTagsForResourceRequest clone() {
        return (ListTagsForResourceRequest) super.clone();
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector.types.pojo;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.SeekableByteChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.UUID;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.ExtensionTypeVector;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.FixedSizeBinaryVector;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.ipc.ArrowFileReader;
import org.apache.arrow.vector.ipc.ArrowFileWriter;
import org.apache.arrow.vector.types.pojo.ArrowType.ExtensionType;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests round-tripping and fallback reading of a custom Arrow extension type (a 16-byte UUID
 * stored as {@code FixedSizeBinary(16)}).
 */
public class TestExtensionType {
    /**
     * Test that a custom UUID type can be round-tripped through a temporary file.
     */
    @Test
    public void roundtripUuid() throws IOException {
        ExtensionTypeRegistry.register(new UuidType());
        final Schema schema = new Schema(Collections.singletonList(Field.nullable("a", new UuidType())));
        try (final BufferAllocator allocator = new RootAllocator(Integer.MAX_VALUE);
                final VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
            UUID u1 = UUID.randomUUID();
            UUID u2 = UUID.randomUUID();
            UuidVector vector = (UuidVector) root.getVector("a");
            vector.setValueCount(2);
            vector.set(0, u1);
            vector.set(1, u2);
            root.setRowCount(2);
            final File file = File.createTempFile("uuidtest", ".arrow");
            // Ensure the temporary file does not outlive the JVM, even if an assertion fails mid-test.
            file.deleteOnExit();
            try (final WritableByteChannel channel = FileChannel
                    .open(Paths.get(file.getAbsolutePath()), StandardOpenOption.WRITE);
                    final ArrowFileWriter writer = new ArrowFileWriter(root, null, channel)) {
                writer.start();
                writer.writeBatch();
                writer.end();
            }
            try (final SeekableByteChannel channel = Files.newByteChannel(Paths.get(file.getAbsolutePath()));
                    final ArrowFileReader reader = new ArrowFileReader(channel, allocator)) {
                reader.loadNextBatch();
                final VectorSchemaRoot readerRoot = reader.getVectorSchemaRoot();
                Assert.assertEquals(root.getSchema(), readerRoot.getSchema());
                final Field field = readerRoot.getSchema().getFields().get(0);
                final UuidType expectedType = new UuidType();
                // JUnit's assertEquals takes (expected, actual); keep the expectation first so
                // failure messages read correctly.
                Assert.assertEquals(expectedType.extensionName(),
                        field.getMetadata().get(ExtensionType.EXTENSION_METADATA_KEY_NAME));
                Assert.assertEquals(expectedType.serialize(),
                        field.getMetadata().get(ExtensionType.EXTENSION_METADATA_KEY_METADATA));
                // Since the type is registered, the reader materializes an extension vector.
                final ExtensionTypeVector deserialized = (ExtensionTypeVector) readerRoot.getFieldVectors().get(0);
                Assert.assertEquals(vector.getValueCount(), deserialized.getValueCount());
                for (int i = 0; i < vector.getValueCount(); i++) {
                    Assert.assertEquals(vector.isNull(i), deserialized.isNull(i));
                    if (!vector.isNull(i)) {
                        Assert.assertEquals(vector.getObject(i), deserialized.getObject(i));
                    }
                }
            }
        }
    }

    /**
     * Test that a custom UUID type can be read as its underlying type.
     */
    @Test
    public void readUnderlyingType() throws IOException {
        ExtensionTypeRegistry.register(new UuidType());
        final Schema schema = new Schema(Collections.singletonList(Field.nullable("a", new UuidType())));
        try (final BufferAllocator allocator = new RootAllocator(Integer.MAX_VALUE);
                final VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
            UUID u1 = UUID.randomUUID();
            UUID u2 = UUID.randomUUID();
            UuidVector vector = (UuidVector) root.getVector("a");
            vector.setValueCount(2);
            vector.set(0, u1);
            vector.set(1, u2);
            root.setRowCount(2);
            final File file = File.createTempFile("uuidtest", ".arrow");
            // Ensure the temporary file does not outlive the JVM, even if an assertion fails mid-test.
            file.deleteOnExit();
            try (final WritableByteChannel channel = FileChannel
                    .open(Paths.get(file.getAbsolutePath()), StandardOpenOption.WRITE);
                    final ArrowFileWriter writer = new ArrowFileWriter(root, null, channel)) {
                writer.start();
                writer.writeBatch();
                writer.end();
            }
            // Unregister before reading back, so the reader falls back to the underlying
            // FixedSizeBinary(16) representation instead of the extension vector.
            ExtensionTypeRegistry.unregister(new UuidType());
            try (final SeekableByteChannel channel = Files.newByteChannel(Paths.get(file.getAbsolutePath()));
                    final ArrowFileReader reader = new ArrowFileReader(channel, allocator)) {
                reader.loadNextBatch();
                final VectorSchemaRoot readerRoot = reader.getVectorSchemaRoot();
                Assert.assertEquals(1, readerRoot.getSchema().getFields().size());
                Assert.assertEquals("a", readerRoot.getSchema().getFields().get(0).getName());
                Assert.assertTrue(readerRoot.getSchema().getFields().get(0).getType() instanceof ArrowType.FixedSizeBinary);
                Assert.assertEquals(16,
                        ((ArrowType.FixedSizeBinary) readerRoot.getSchema().getFields().get(0).getType()).getByteWidth());
                final Field field = readerRoot.getSchema().getFields().get(0);
                final UuidType expectedType = new UuidType();
                // The extension metadata is still preserved in the field, even when unregistered.
                Assert.assertEquals(expectedType.extensionName(),
                        field.getMetadata().get(ExtensionType.EXTENSION_METADATA_KEY_NAME));
                Assert.assertEquals(expectedType.serialize(),
                        field.getMetadata().get(ExtensionType.EXTENSION_METADATA_KEY_METADATA));
                final FixedSizeBinaryVector deserialized = (FixedSizeBinaryVector) readerRoot.getFieldVectors().get(0);
                Assert.assertEquals(vector.getValueCount(), deserialized.getValueCount());
                for (int i = 0; i < vector.getValueCount(); i++) {
                    Assert.assertEquals(vector.isNull(i), deserialized.isNull(i));
                    if (!vector.isNull(i)) {
                        // The raw bytes must be the big-endian (most-significant-bits-first) encoding.
                        final UUID uuid = vector.getObject(i);
                        final ByteBuffer bb = ByteBuffer.allocate(16);
                        bb.putLong(uuid.getMostSignificantBits());
                        bb.putLong(uuid.getLeastSignificantBits());
                        Assert.assertArrayEquals(bb.array(), deserialized.get(i));
                    }
                }
            }
        }
    }

    /** Extension type storing a UUID as 16 fixed bytes. */
    static class UuidType extends ExtensionType {
        @Override
        public ArrowType storageType() {
            return new ArrowType.FixedSizeBinary(16);
        }

        @Override
        public String extensionName() {
            return "uuid";
        }

        @Override
        public boolean extensionEquals(ExtensionType other) {
            return other instanceof UuidType;
        }

        @Override
        public ArrowType deserialize(ArrowType storageType, String serializedData) {
            if (!storageType.equals(storageType())) {
                throw new UnsupportedOperationException("Cannot construct UuidType from underlying type " + storageType);
            }
            return new UuidType();
        }

        @Override
        public String serialize() {
            // The type carries no parameters, so the serialized form is empty.
            return "";
        }

        @Override
        public FieldVector getNewVector(String name, FieldType fieldType, BufferAllocator allocator) {
            return new UuidVector(name, allocator, new FixedSizeBinaryVector(name, allocator, 16));
        }
    }

    /** Vector of UUIDs backed by a {@link FixedSizeBinaryVector}. */
    static class UuidVector extends ExtensionTypeVector<FixedSizeBinaryVector> {
        public UuidVector(String name, BufferAllocator allocator, FixedSizeBinaryVector underlyingVector) {
            super(name, allocator, underlyingVector);
        }

        @Override
        public UUID getObject(int index) {
            final ByteBuffer bb = ByteBuffer.wrap(getUnderlyingVector().getObject(index));
            return new UUID(bb.getLong(), bb.getLong());
        }

        @Override
        public int hashCode(int index) {
            return getUnderlyingVector().hashCode(index);
        }

        @Override
        public boolean equals(int index, ValueVector to, int toIndex) {
            return getUnderlyingVector().equals(index, to, toIndex);
        }

        public void set(int index, UUID uuid) {
            // Encode most-significant bits first; getObject reads them back in the same order.
            ByteBuffer bb = ByteBuffer.allocate(16);
            bb.putLong(uuid.getMostSignificantBits());
            bb.putLong(uuid.getLeastSignificantBits());
            getUnderlyingVector().set(index, bb.array());
        }
    }
}
|
package com.berkgokden.chess.pieces;
import com.berkgokden.chess.Piece;
import com.berkgokden.chess.PieceType;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Unit tests for the {@link Knight} piece.
 */
public class KnightTest {

    /** Relocates a piece to the given board coordinates. */
    private static void placeAt(Piece piece, int x, int y) {
        piece.setX(x);
        piece.setY(y);
    }

    @Test
    public void shouldPassWhenPieceTypeReturnsAsKnight() throws Exception {
        assertEquals(PieceType.KNIGHT, new Knight().getType());
    }

    @Test
    public void shouldPassWhentIsSafeWorksCorrectly() throws Exception {
        Piece knight = new Knight();
        placeAt(knight, 2, 2);

        Piece other = new DummyPiece();

        // A knight does not attack squares on its own file, rank or diagonals.
        placeAt(other, 2, 0); // same file
        assertTrue(knight.isSafe(other));
        placeAt(other, 0, 2); // same rank
        assertTrue(knight.isSafe(other));
        placeAt(other, 4, 4); // diagonal
        assertTrue(knight.isSafe(other));
        placeAt(other, 0, 4); // anti-diagonal
        assertTrue(knight.isSafe(other));

        // Squares an L-shaped move away are attacked, hence not safe.
        placeAt(other, 3, 4);
        assertFalse(knight.isSafe(other));
        placeAt(other, 1, 4);
        assertFalse(knight.isSafe(other));
    }
}
|
/**
* Copyright (c) 2017 Dell Inc., or its subsidiaries. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package io.pravega.segmentstore.server.store;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.pravega.common.Exceptions;
import io.pravega.common.ObjectClosedException;
import io.pravega.common.TimeoutTimer;
import io.pravega.common.concurrent.Futures;
import io.pravega.common.io.StreamHelpers;
import io.pravega.common.util.Retry;
import io.pravega.segmentstore.contracts.AttributeUpdate;
import io.pravega.segmentstore.contracts.AttributeUpdateType;
import io.pravega.segmentstore.contracts.Attributes;
import io.pravega.segmentstore.contracts.ContainerNotFoundException;
import io.pravega.segmentstore.contracts.ReadResult;
import io.pravega.segmentstore.contracts.ReadResultEntry;
import io.pravega.segmentstore.contracts.ReadResultEntryContents;
import io.pravega.segmentstore.contracts.ReadResultEntryType;
import io.pravega.segmentstore.contracts.SegmentProperties;
import io.pravega.segmentstore.contracts.StreamSegmentInformation;
import io.pravega.segmentstore.contracts.StreamSegmentNotExistsException;
import io.pravega.segmentstore.contracts.StreamSegmentStore;
import io.pravega.segmentstore.contracts.StreamSegmentTruncatedException;
import io.pravega.segmentstore.server.IllegalContainerStateException;
import io.pravega.segmentstore.server.containers.ContainerConfig;
import io.pravega.segmentstore.server.logs.DurableLogConfig;
import io.pravega.segmentstore.server.reading.ReadIndexConfig;
import io.pravega.segmentstore.server.writer.WriterConfig;
import io.pravega.segmentstore.storage.DataLogWriterNotPrimaryException;
import io.pravega.shared.protocol.netty.ByteBufWrapper;
import io.pravega.shared.segment.StreamSegmentNameUtils;
import io.pravega.test.common.AssertExtensions;
import io.pravega.test.common.ThreadPooledTestSuite;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.CancellationException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import lombok.Cleanup;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import org.junit.Assert;
import org.junit.Test;
/**
* Base class for any test that verifies the functionality of a StreamSegmentStore class.
*/
@Slf4j
public abstract class StreamSegmentStoreTestBase extends ThreadPooledTestSuite {
    //region Test Configuration

    // Even though this should work with just 1-2 threads, doing so would cause this test to run for a long time. Choosing
    // a decent size so that the tests do finish up within a few seconds.
    private static final int THREADPOOL_SIZE_SEGMENT_STORE = 20;
    private static final int THREADPOOL_SIZE_SEGMENT_STORE_STORAGE = 10;
    private static final int THREADPOOL_SIZE_TEST = 3;
    private static final String EMPTY_SEGMENT_NAME = "Empty_Segment";
    // Workload shape: how many segments, transactions, appends and attribute updates each test generates.
    private static final int SEGMENT_COUNT = 10;
    private static final int TRANSACTIONS_PER_SEGMENT = 1;
    private static final int APPENDS_PER_SEGMENT = 100;
    private static final int ATTRIBUTE_UPDATES_PER_SEGMENT = 100;
    private static final int MAX_INSTANCE_COUNT = 4;
    private static final List<UUID> ATTRIBUTES = Arrays.asList(Attributes.EVENT_COUNT, UUID.randomUUID(), UUID.randomUUID());
    // Expected per-phase growth of each attribute value - presumably one update per append plus the
    // explicit attribute updates (see where expectedAttributeValue is incremented in endToEndProcess).
    private static final int ATTRIBUTE_UPDATE_DELTA = APPENDS_PER_SEGMENT + ATTRIBUTE_UPDATES_PER_SEGMENT;
    private static final Duration TIMEOUT = Duration.ofSeconds(120);
    // Base SegmentStore configuration shared by every builder instance the tests create.
    protected final ServiceBuilderConfig.Builder configBuilder = ServiceBuilderConfig
            .builder()
            .include(ServiceConfig
                    .builder()
                    .with(ServiceConfig.CONTAINER_COUNT, 4)
                    .with(ServiceConfig.THREAD_POOL_SIZE, THREADPOOL_SIZE_SEGMENT_STORE)
                    .with(ServiceConfig.STORAGE_THREAD_POOL_SIZE, THREADPOOL_SIZE_SEGMENT_STORE_STORAGE)
                    .with(ServiceConfig.CACHE_POLICY_MAX_SIZE, 64 * 1024 * 1024L)
                    .with(ServiceConfig.CACHE_POLICY_MAX_TIME, 30))
            .include(ContainerConfig
                    .builder()
                    .with(ContainerConfig.SEGMENT_METADATA_EXPIRATION_SECONDS, ContainerConfig.MINIMUM_SEGMENT_METADATA_EXPIRATION_SECONDS))
            .include(DurableLogConfig
                    .builder()
                    .with(DurableLogConfig.CHECKPOINT_MIN_COMMIT_COUNT, 10)
                    .with(DurableLogConfig.CHECKPOINT_COMMIT_COUNT, 100)
                    .with(DurableLogConfig.CHECKPOINT_TOTAL_COMMIT_LENGTH, 10 * 1024 * 1024L))
            .include(ReadIndexConfig
                    .builder()
                    .with(ReadIndexConfig.MEMORY_READ_MIN_LENGTH, 512) // Need this for truncation testing.
                    .with(ReadIndexConfig.STORAGE_READ_ALIGNMENT, 1024))
            .include(WriterConfig
                    .builder()
                    .with(WriterConfig.FLUSH_THRESHOLD_BYTES, 1)
                    .with(WriterConfig.FLUSH_THRESHOLD_MILLIS, 25L)
                    .with(WriterConfig.MIN_READ_TIMEOUT_MILLIS, 10L)
                    .with(WriterConfig.MAX_READ_TIMEOUT_MILLIS, 250L));
    /**
     * Size of the thread pool used by the test suite itself (see {@code ThreadPooledTestSuite});
     * the segment stores under test use their own, larger pools configured above.
     */
    @Override
    protected int getThreadPoolSize() {
        return THREADPOOL_SIZE_TEST;
    }
    /**
     * When overridden in a derived class, this will return a multiplier applied to APPENDS_PER_SEGMENT and
     * ATTRIBUTE_UPDATES_PER_SEGMENT that will be used for the fencing test. For non-memory tests, executing too many
     * operations (in sequence, like the test does) will cause the test to run for too long, hence a need to be able to
     * reduce this if needed.
     */
    protected double getFencingTestOperationMultiplier() {
        return 1.0;
    }
    /**
     * When overridden in a derived class, this will indicate whether we want to execute a new set of Segment Appends
     * after we have merged transactions into them. Default is true, but some tests may take longer to execute so this
     * can be disabled for those.
     *
     * @return True if {@link #testEndToEnd()} should append data after merging transactions, false otherwise.
     */
    protected boolean appendAfterMerging() {
        return true;
    }

    //endregion
    /**
     * Tests an end-to-end scenario for the SegmentStore, utilizing a read-write SegmentStore for making modifications
     * (writes, seals, creates, etc.) and a ReadOnlySegmentStore to verify the changes being persisted into Storage.
     * Covers: appends, reads, segment and transaction creation, transaction mergers and recovery.
     *
     * @throws Exception If an exception occurred.
     */
    @Test
    public void testEndToEnd() throws Exception {
        // Full verification: also read back segment content from Storage.
        endToEndProcess(true);
    }
    /**
     * End to end test to verify segment store process. Runs four phases, each inside a fresh
     * SegmentStore instance so that state must survive via recovery between phases:
     * create/append, merge transactions (+ optional more appends), truncate/read, seal/delete.
     *
     * @param verifySegmentContent whether it's needed to read segment content for verification.
     * @throws Exception If an exception occurred.
     */
    void endToEndProcess(boolean verifySegmentContent) throws Exception {
        ArrayList<String> segmentNames;
        HashMap<String, ArrayList<String>> transactionsBySegment;
        HashMap<String, Long> lengths = new HashMap<>();
        ArrayList<ByteBuf> appendBuffers = new ArrayList<>();
        HashMap<String, Long> startOffsets = new HashMap<>();
        HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
        long expectedAttributeValue = 0;
        // Incremented for every createBuilder call; each call simulates a SegmentStore process (re)start.
        int instanceId = 0;
        // Phase 1: Create segments and add some appends.
        log.info("Starting Phase 1.");
        try (val builder = createBuilder(++instanceId)) {
            val segmentStore = builder.createStreamSegmentService();
            // Create the StreamSegments.
            segmentNames = createSegments(segmentStore);
            log.info("Created Segments: {}.", String.join(", ", segmentNames));
            transactionsBySegment = createTransactions(segmentNames, segmentStore);
            log.info("Created Transactions: {}.", transactionsBySegment.values().stream().flatMap(Collection::stream).collect(Collectors.joining(", ")));
            // Add some appends.
            ArrayList<String> segmentsAndTransactions = new ArrayList<>(segmentNames);
            transactionsBySegment.values().forEach(segmentsAndTransactions::addAll);
            appendData(segmentsAndTransactions, segmentContents, lengths, appendBuffers, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            expectedAttributeValue += ATTRIBUTE_UPDATE_DELTA;
            log.info("Finished appending data.");
            checkSegmentStatus(lengths, startOffsets, false, false, expectedAttributeValue, segmentStore);
            log.info("Finished Phase 1");
        }
        // Verify all buffers have been released.
        checkAppendLeaks(appendBuffers);
        appendBuffers.clear();
        // Phase 2: Force a recovery and merge all transactions.
        log.info("Starting Phase 2.");
        try (val builder = createBuilder(++instanceId)) {
            val segmentStore = builder.createStreamSegmentService();
            // Recovery must have preserved all data written in Phase 1.
            checkReads(segmentContents, segmentStore);
            log.info("Finished checking reads.");
            // Merge all transactions.
            mergeTransactions(transactionsBySegment, lengths, segmentContents, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            log.info("Finished merging transactions.");
            if (appendAfterMerging()) {
                // Check the status now. A nice side effect of this is that it loads all extended attributes from Storage so
                // that we can modify them in the next step (during appending).
                checkSegmentStatus(lengths, startOffsets, false, false, expectedAttributeValue, segmentStore);
                // Append more data.
                appendData(segmentNames, segmentContents, lengths, appendBuffers, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                expectedAttributeValue += ATTRIBUTE_UPDATE_DELTA;
                log.info("Finished appending after merging transactions.");
            } else {
                log.info("Skipped appending after merging transactions due to setting being disabled in this test.");
            }
            checkSegmentStatus(lengths, startOffsets, false, false, expectedAttributeValue, segmentStore);
            log.info("Finished Phase 2.");
        }
        // Verify all buffers have been released.
        checkAppendLeaks(appendBuffers);
        appendBuffers.clear();
        // Phase 3: Force a recovery, immediately check reads, then truncate and read at the same time.
        log.info("Starting Phase 3.");
        try (val builder = createBuilder(++instanceId);
                val readOnlyBuilder = createReadOnlyBuilder(instanceId)) {
            val segmentStore = builder.createStreamSegmentService();
            val readOnlySegmentStore = readOnlyBuilder.createStreamSegmentService();
            checkReads(segmentContents, segmentStore);
            log.info("Finished checking reads.");
            if (verifySegmentContent) {
                // Wait for all the data to move to Storage.
                waitForSegmentsInStorage(segmentNames, segmentStore, readOnlySegmentStore)
                        .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                log.info("Finished waiting for segments in Storage.");
                checkStorage(segmentContents, segmentStore, readOnlySegmentStore);
                log.info("Finished Storage check.");
                checkReadsWhileTruncating(segmentContents, startOffsets, segmentStore);
                log.info("Finished checking reads while truncating.");
                checkStorage(segmentContents, segmentStore, readOnlySegmentStore);
                log.info("Finished Phase 3.");
            }
        }
        // Phase 4: Force a recovery, seal segments and then delete them.
        log.info("Starting Phase 4.");
        try (val builder = createBuilder(++instanceId);
                val readOnlyBuilder = createReadOnlyBuilder(instanceId)) {
            val segmentStore = builder.createStreamSegmentService();
            val readOnlySegmentStore = readOnlyBuilder.createStreamSegmentService();
            // Seals.
            sealSegments(segmentNames, segmentStore).get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
            log.info("Finished sealing.");
            checkSegmentStatus(lengths, startOffsets, true, false, expectedAttributeValue, segmentStore);
            if (verifySegmentContent) {
                waitForSegmentsInStorage(segmentNames, segmentStore, readOnlySegmentStore)
                        .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                log.info("Finished waiting for segments in Storage.");
            }
            // Deletes.
            deleteSegments(segmentNames, segmentStore).join();
            log.info("Finished deleting segments.");
            checkSegmentStatus(lengths, startOffsets, true, true, expectedAttributeValue, segmentStore);
            log.info("Finished Phase 4.");
        }
        log.info("Finished.");
    }
/**
 * Tests an end-to-end scenario for the SegmentStore where operations are continuously executed while the SegmentStore
 * itself is being fenced out by new instances. The difference between this and testEndToEnd() is that this does not
 * do a graceful shutdown of the Segment Store, instead it creates a new instance while the previous one is still running.
 *
 * @throws Exception If an exception occurred.
 */
@Test
public void testEndToEndWithFencing() throws Exception {
    // Delegate with content verification enabled: the helper also reads segment data back and checks Storage state.
    endToEndProcessWithFencing(true);
}
/**
 * End to end test to verify segment store process with fencing.
 * Runs a stream of append/attribute-update requests while periodically spinning up new Segment Store instances
 * (via FencingTestContext), then optionally verifies reads and Storage contents, and finally deletes all segments.
 *
 * @param verifySegmentContent whether it's needed to read segment content for verification.
 * @throws Exception If an exception occurred.
 */
public void endToEndProcessWithFencing(boolean verifySegmentContent) throws Exception {
    log.info("Starting.");
    try (val context = new FencingTestContext()) {
        // Create first instance (this is a one-off so we can bootstrap the test).
        context.createNewInstance();
        // Create the StreamSegments and their transactions.
        val segmentNames = createSegments(context.getActiveStore());
        val segmentsAndTransactions = new ArrayList<String>(segmentNames);
        log.info("Created Segments: {}.", String.join(", ", segmentNames));
        // Generate all the requests. Note that this also eagerly populates the expected lengths/contents maps;
        // startOffsets stays empty (no truncation in this test), so checkSegmentStatus will use 0 defaults.
        HashMap<String, Long> lengths = new HashMap<>();
        HashMap<String, Long> startOffsets = new HashMap<>();
        HashMap<String, ByteArrayOutputStream> segmentContents = new HashMap<>();
        val appends = createAppendDataRequests(segmentsAndTransactions, segmentContents, lengths, null,
                applyFencingMultiplier(ATTRIBUTE_UPDATES_PER_SEGMENT), applyFencingMultiplier(APPENDS_PER_SEGMENT));
        val requests = appends.iterator();
        // Calculate how frequently to create a new instance of the Segment Store.
        int newInstanceFrequency = appends.size() / applyFencingMultiplier(MAX_INSTANCE_COUNT);
        log.info("Creating a new Segment Store instance every {} operations.", newInstanceFrequency);
        // Execute all the requests.
        val operationCompletions = executeWithFencing(requests, newInstanceFrequency, context);
        // Wait for our operations to complete.
        operationCompletions.get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        // Wait for the instance creations to be done (this will help surface any exceptions coming from this).
        context.awaitAllInitializations().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        if (verifySegmentContent) {
            // Check reads.
            checkReads(segmentContents, context.getActiveStore());
            log.info("Finished checking reads.");
            // Integer.MAX_VALUE - 1 as instance id: presumably chosen so it cannot collide with the
            // iteration-based ids used by the fencing context - NOTE(review): confirm.
            try (val readOnlyBuilder = createReadOnlyBuilder(Integer.MAX_VALUE - 1)) {
                waitForSegmentsInStorage(segmentNames, context.getActiveStore(), readOnlyBuilder.createStreamSegmentService())
                        .get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                log.info("Finished waiting for segments in Storage.");
            }
        }
        // Delete everything.
        deleteSegments(segmentNames, context.getActiveStore()).join();
        log.info("Finished deleting segments.");
        checkSegmentStatus(lengths, startOffsets, true, true, ATTRIBUTE_UPDATE_DELTA, context.getActiveStore());
    }
    log.info("Finished.");
}
//region Helpers
/**
 * Creates a ServiceBuilder for the given instance id and initializes it, making sure the builder is closed
 * (not leaked) if initialization fails.
 *
 * @param instanceId The Id of the ServiceBuilder to create.
 * @return The initialized ServiceBuilder.
 * @throws Exception If initialization failed (the builder is closed before rethrowing).
 */
private ServiceBuilder createBuilder(int instanceId) throws Exception {
    val builder = createBuilder(this.configBuilder, instanceId);
    try {
        builder.initialize();
    } catch (Throwable ex) {
        // Release the builder's resources before propagating; callers never see this instance.
        builder.close();
        throw ex;
    }
    return builder;
}
/**
 * When overridden in a derived class, creates a ServiceBuilder using the given configuration.
 * Implementations only construct the builder; callers (see createBuilder(int) and createReadOnlyBuilder(int))
 * are responsible for initializing and closing it.
 *
 * @param builderConfig The configuration to use.
 * @param instanceId The Id of the ServiceBuilder to create. For least interference, these should be unique.
 * @return The ServiceBuilder.
 */
protected abstract ServiceBuilder createBuilder(ServiceBuilderConfig.Builder builderConfig, int instanceId);
/**
 * Creates and initializes a ServiceBuilder configured as a read-only Segment Store, based on a copy of the
 * base configuration (the base config itself is not modified).
 *
 * @param instanceId The Id of the ServiceBuilder to create.
 * @return The initialized, read-only ServiceBuilder.
 * @throws Exception If initialization failed (the builder is closed before rethrowing).
 */
private ServiceBuilder createReadOnlyBuilder(int instanceId) throws Exception {
    // Copy base config properties to a new object.
    val props = new Properties();
    this.configBuilder.build().forEach(props::put);
    // Create a new config (so we don't alter the base one) and set the ReadOnlySegmentStore to true.
    val configBuilder = ServiceBuilderConfig.builder()
            .include(props)
            .include(ServiceConfig.builder()
                    .with(ServiceConfig.READONLY_SEGMENT_STORE, true));
    val builder = createBuilder(configBuilder, instanceId);
    try {
        builder.initialize();
    } catch (Throwable ex) {
        // Match createBuilder(int): if initialization fails, close the builder instead of leaking it.
        builder.close();
        throw ex;
    }
    return builder;
}
/**
 * Shorthand for {@link #createAppendDataRequests(Collection, HashMap, HashMap, List, int, int)} using the
 * default per-segment attribute-update and append counts.
 */
private ArrayList<StoreRequest> createAppendDataRequests(
        Collection<String> segmentNames, HashMap<String, ByteArrayOutputStream> segmentContents, HashMap<String, Long> lengths, List<ByteBuf> appendBuffers) {
    return createAppendDataRequests(segmentNames, segmentContents, lengths, appendBuffers, ATTRIBUTE_UPDATES_PER_SEGMENT, APPENDS_PER_SEGMENT);
}
/**
 * Builds the deferred append/attribute-update requests for each (non-empty) segment. For every segment this adds:
 * half the attribute updates, then the appends, then the remaining attribute updates.
 * NOTE: the expected lengths and contents maps are updated eagerly here (at request-creation time), while the
 * actual store operations only run when the returned StoreRequests are executed.
 *
 * @param segmentNames              Segments to generate requests for (the empty segment is skipped).
 * @param segmentContents           Out: expected contents per segment (appended to eagerly).
 * @param lengths                   Out: expected lengths per segment (updated eagerly).
 * @param appendBuffers             Out (may be null): collects the Netty buffers used, for later leak checking.
 * @param attributeUpdatesPerSegment Number of attribute updates per segment (split half before/half after appends).
 * @param appendsPerSegment         Number of appends per segment.
 * @return The list of deferred requests, in execution order.
 */
private ArrayList<StoreRequest> createAppendDataRequests(
        Collection<String> segmentNames, HashMap<String, ByteArrayOutputStream> segmentContents, HashMap<String, Long> lengths,
        List<ByteBuf> appendBuffers, int attributeUpdatesPerSegment, int appendsPerSegment) {
    val result = new ArrayList<StoreRequest>();
    val halfAttributeCount = attributeUpdatesPerSegment / 2;
    for (String segmentName : segmentNames) {
        if (isEmptySegment(segmentName)) {
            continue;
        }
        // Add half the attribute updates now.
        for (int i = 0; i < halfAttributeCount; i++) {
            result.add(store -> store.updateAttributes(segmentName, createAttributeUpdates(), TIMEOUT));
        }
        // Add some appends.
        for (int i = 0; i < appendsPerSegment; i++) {
            byte[] appendData = getAppendData(segmentName, i);
            lengths.put(segmentName, lengths.getOrDefault(segmentName, 0L) + appendData.length);
            recordAppend(segmentName, appendData, segmentContents);
            // Use Netty ByteBuf here - this mimics the behavior of AppendProcessor.
            ByteBuf buf = Unpooled.wrappedBuffer(appendData);
            result.add(store -> Futures.toVoid(store.append(segmentName, new ByteBufWrapper(buf), createAttributeUpdates(), TIMEOUT)));
            if (appendBuffers != null) {
                appendBuffers.add(buf);
            }
        }
        // Add the rest of the attribute updates.
        for (int i = 0; i < halfAttributeCount; i++) {
            result.add(store -> store.updateAttributes(segmentName, createAttributeUpdates(), TIMEOUT));
        }
    }
    return result;
}
/**
 * Creates the default set of append/attribute-update requests and executes them all against the given store.
 * Returns a future that completes when every request has completed.
 */
private CompletableFuture<Void> appendData(Collection<String> segmentNames, HashMap<String, ByteArrayOutputStream> segmentContents,
                                           HashMap<String, Long> lengths, List<ByteBuf> appendBuffers, StreamSegmentStore store) {
    return execute(createAppendDataRequests(segmentNames, segmentContents, lengths, appendBuffers), store);
}
/**
 * Creates one Accumulate(+1) update for every test attribute id.
 */
private Collection<AttributeUpdate> createAttributeUpdates() {
    val updates = new ArrayList<AttributeUpdate>(ATTRIBUTES.size());
    for (UUID attributeId : ATTRIBUTES) {
        updates.add(new AttributeUpdate(attributeId, AttributeUpdateType.Accumulate, 1));
    }
    return updates;
}
/**
 * Builds deferred merge requests for every (parent, transaction) pair.
 * NOTE: the expected-state bookkeeping (lengths and contents maps) is performed eagerly here, at
 * request-creation time; only the store.mergeStreamSegment call itself is deferred into the StoreRequest.
 * Each transaction's expected length/content is folded into its parent and then removed from the maps.
 *
 * @param transactionsBySegment Map of parent segment name to its transaction segment names.
 * @param lengths               In/out: expected lengths (transactions folded into parents, then removed).
 * @param segmentContents       In/out: expected contents (transactions appended to parents, then removed).
 * @return The list of deferred merge requests.
 * @throws Exception If writing to a parent's expected-content stream failed.
 */
private ArrayList<StoreRequest> createMergeTransactionsRequests(
        HashMap<String, ArrayList<String>> transactionsBySegment, HashMap<String, Long> lengths,
        HashMap<String, ByteArrayOutputStream> segmentContents) throws Exception {
    val result = new ArrayList<StoreRequest>();
    for (Map.Entry<String, ArrayList<String>> e : transactionsBySegment.entrySet()) {
        String parentName = e.getKey();
        for (String transactionName : e.getValue()) {
            result.add(store -> Futures.toVoid(store.mergeStreamSegment(parentName, transactionName, TIMEOUT)));
            // Update parent length.
            lengths.put(parentName, lengths.get(parentName) + lengths.get(transactionName));
            lengths.remove(transactionName);
            // Update parent contents.
            segmentContents.get(parentName).write(segmentContents.get(transactionName).toByteArray());
            segmentContents.remove(transactionName);
        }
    }
    return result;
}
/**
 * Creates the merge requests (updating the expected lengths/contents maps eagerly) and executes them all
 * against the given store. Returns a future that completes when every merge has completed.
 */
private CompletableFuture<Void> mergeTransactions(HashMap<String, ArrayList<String>> transactionsBySegment, HashMap<String, Long> lengths,
                                                  HashMap<String, ByteArrayOutputStream> segmentContents, StreamSegmentStore store) throws Exception {
    return execute(createMergeTransactionsRequests(transactionsBySegment, lengths, segmentContents), store);
}
/**
 * Builds one deferred seal request per segment name.
 */
private ArrayList<StoreRequest> createSealSegmentsRequests(Collection<String> segmentNames) {
    return segmentNames.stream()
            .map(segmentName -> (StoreRequest) store -> Futures.toVoid(store.sealStreamSegment(segmentName, TIMEOUT)))
            .collect(Collectors.toCollection(ArrayList::new));
}
/**
 * Seals all the given segments, returning a future that completes when every seal has completed.
 */
private CompletableFuture<Void> sealSegments(Collection<String> segmentNames, StreamSegmentStore store) {
    return execute(createSealSegmentsRequests(segmentNames), store);
}
/**
 * Starts every request against the given store (all at once) and returns a future that completes
 * when all of them have completed.
 */
private CompletableFuture<Void> execute(ArrayList<StoreRequest> requests, StreamSegmentStore store) {
    val futures = new ArrayList<CompletableFuture<Void>>(requests.size());
    for (StoreRequest request : requests) {
        futures.add(request.apply(store));
    }
    return Futures.allOf(futures);
}
/**
 * Executes all the requests asynchronously, one by one, on the given FencingTestContext. Every
 * newInstanceFrequency operations, a new Segment Store instance creation is kicked off asynchronously.
 * NOTE(review): if newInstanceFrequency is 0 (possible when the append count is smaller than the instance
 * count at the call site), the modulo below throws ArithmeticException - confirm inputs guarantee >= 1.
 */
private CompletableFuture<Void> executeWithFencing(Iterator<StoreRequest> requests, int newInstanceFrequency, FencingTestContext context) {
    AtomicInteger index = new AtomicInteger();
    return Futures.loop(
            requests::hasNext,
            () -> {
                // Create a new Segment Store instance if we need to.
                if (index.incrementAndGet() % newInstanceFrequency == 0) {
                    context.createNewInstanceAsync();
                }
                return executeWithFencing(requests.next(), index.get(), context);
            },
            executorService());
}
/**
 * Executes the given request on the given FencingTestContext. We retry all expected exceptions, and when we do, we
 * make sure to execute them on the current (active) Segment Store instance (since the previous one may be unusable).
 */
private CompletableFuture<Void> executeWithFencing(StoreRequest request, int index, FencingTestContext context) {
    log.debug("Initiating Operation #{} on iteration {}.", index, context.getIteration());
    AtomicReference<StreamSegmentStore> requestStore = new AtomicReference<>(context.getActiveStore());
    return Retry.withExpBackoff(50, 2, 10, TIMEOUT.toMillis() / 10)
            .retryWhen(ex -> {
                // Refresh the store reference so the next attempt targets the currently-active instance.
                requestStore.getAndSet(context.getActiveStore());
                ex = Exceptions.unwrap(ex);
                log.info("Operation #{} (Iteration = {}) failed due to {}.", index, context.getIteration(), ex.toString());
                // Only retry failures that are the expected fallout of being fenced out.
                return isExpectedFencingException(ex);
            })
            .runAsync(() -> request.apply(requestStore.get()), executorService());
}
/**
 * Initiates deletion of every given segment and returns a future that completes when all deletions have completed.
 */
private CompletableFuture<Void> deleteSegments(Collection<String> segmentNames, StreamSegmentStore store) {
    return Futures.allOf(segmentNames.stream()
            .map(segmentName -> store.deleteStreamSegment(segmentName, TIMEOUT))
            .collect(Collectors.toList()));
}
/**
 * Creates SEGMENT_COUNT regular segments plus the designated empty segment, waiting for all creations
 * to complete. Only the regular segment names are returned; the empty segment is created but not listed.
 */
private ArrayList<String> createSegments(StreamSegmentStore store) {
    val names = new ArrayList<String>(SEGMENT_COUNT);
    val creations = new ArrayList<CompletableFuture<Void>>(SEGMENT_COUNT + 1);
    for (int segmentId = 0; segmentId < SEGMENT_COUNT; segmentId++) {
        val name = getSegmentName(segmentId);
        names.add(name);
        creations.add(store.createStreamSegment(name, null, TIMEOUT));
    }
    // The empty segment is created too, but deliberately kept out of the returned list.
    creations.add(store.createStreamSegment(EMPTY_SEGMENT_NAME, null, TIMEOUT));
    Futures.allOf(creations).join();
    return names;
}
/**
 * Creates TRANSACTIONS_PER_SEGMENT transaction segments for every (non-empty) parent segment, waiting for
 * all creations to complete.
 *
 * @return Map of parent segment name to the list of its transaction segment names.
 */
private HashMap<String, ArrayList<String>> createTransactions(Collection<String> segmentNames, StreamSegmentStore store) {
    val creations = new ArrayList<CompletableFuture<Void>>();
    val result = new HashMap<String, ArrayList<String>>();
    for (String parentName : segmentNames) {
        // The empty segment gets no transactions.
        if (isEmptySegment(parentName)) {
            continue;
        }
        val transactionNames = new ArrayList<String>(TRANSACTIONS_PER_SEGMENT);
        result.put(parentName, transactionNames);
        for (int i = 0; i < TRANSACTIONS_PER_SEGMENT; i++) {
            val transactionName = StreamSegmentNameUtils.getTransactionNameFromId(parentName, UUID.randomUUID());
            transactionNames.add(transactionName);
            creations.add(store.createStreamSegment(transactionName, null, TIMEOUT));
        }
    }
    Futures.allOf(creations).join();
    return result;
}
/**
 * Determines whether the given (already-unwrapped) exception is one of the types expected when an instance
 * is fenced out, and therefore safe to retry on the new active instance.
 */
private boolean isExpectedFencingException(Throwable ex) {
    if (ex instanceof DataLogWriterNotPrimaryException || ex instanceof IllegalContainerStateException) {
        return true;
    }
    return ex instanceof ContainerNotFoundException
            || ex instanceof ObjectClosedException
            || ex instanceof CancellationException;
}
/**
 * Determines whether the given name refers to the designated empty (never-appended-to) segment.
 */
private boolean isEmptySegment(String segmentName) {
    return segmentName.equals(EMPTY_SEGMENT_NAME);
}
/**
 * Generates the deterministic payload ("segmentName_appendId") for a given append.
 *
 * @param segmentName The segment being appended to.
 * @param appendId    The zero-based append index within that segment.
 * @return The payload bytes, encoded as UTF-8.
 */
private byte[] getAppendData(String segmentName, int appendId) {
    // Use an explicit charset: the no-arg getBytes() encodes with the platform-default charset, which
    // would make payloads environment-dependent. UTF-8 matches the default on all common test platforms.
    return String.format("%s_%d", segmentName, appendId).getBytes(java.nio.charset.StandardCharsets.UTF_8);
}
/**
 * Records an append's payload into the expected-contents map for the given segment, creating the segment's
 * accumulator stream on first use.
 *
 * @param segmentName     The segment appended to.
 * @param data            The payload that was appended.
 * @param segmentContents In/out: expected contents per segment.
 */
@SneakyThrows(IOException.class)
private void recordAppend(String segmentName, byte[] data, HashMap<String, ByteArrayOutputStream> segmentContents) {
    // computeIfAbsent replaces the getOrDefault(null)/put dance with the idiomatic accumulator pattern.
    segmentContents.computeIfAbsent(segmentName, ignored -> new ByteArrayOutputStream()).write(data);
}
/**
 * Builds the canonical test segment name for the given index ("Segment_&lt;i&gt;").
 */
private static String getSegmentName(int i) {
    return String.format("Segment_%d", i);
}
/**
 * Verifies the metadata of every segment in the expected-lengths map against the store: deletion status,
 * start offset, length, seal status and attribute values.
 *
 * @param segmentLengths        Expected length per segment; its key set defines which segments are checked.
 * @param startOffsets          Expected start (truncation) offset per segment; missing entries default to 0.
 * @param expectSealed          Whether every segment is expected to be sealed.
 * @param expectDeleted         Whether every segment is expected to be deleted (skips all other checks).
 * @param expectedAttributeValue The value every test attribute is expected to have accumulated to.
 * @param store                 The store to query.
 */
private void checkSegmentStatus(HashMap<String, Long> segmentLengths, HashMap<String, Long> startOffsets,
                                boolean expectSealed, boolean expectDeleted, long expectedAttributeValue, StreamSegmentStore store) {
    for (Map.Entry<String, Long> e : segmentLengths.entrySet()) {
        String segmentName = e.getKey();
        if (expectDeleted) {
            // A deleted segment must be unqueryable.
            AssertExtensions.assertSuppliedFutureThrows(
                    "Segment '" + segmentName + "' was not deleted.",
                    () -> store.getStreamSegmentInfo(segmentName, TIMEOUT),
                    ex -> ex instanceof StreamSegmentNotExistsException);
        } else {
            SegmentProperties sp = store.getStreamSegmentInfo(segmentName, TIMEOUT).join();
            long expectedStartOffset = startOffsets.getOrDefault(segmentName, 0L);
            long expectedLength = e.getValue();
            Assert.assertEquals("Unexpected Start Offset for segment " + segmentName, expectedStartOffset, sp.getStartOffset());
            Assert.assertEquals("Unexpected length for segment " + segmentName, expectedLength, sp.getLength());
            Assert.assertEquals("Unexpected value for isSealed for segment " + segmentName, expectSealed, sp.isSealed());
            Assert.assertFalse("Unexpected value for isDeleted for segment " + segmentName, sp.isDeleted());
            // Check attributes.
            val allAttributes = store.getAttributes(segmentName, ATTRIBUTES, true, TIMEOUT).join();
            for (UUID attributeId : ATTRIBUTES) {
                Assert.assertEquals("Unexpected attribute value from getAttributes().",
                        expectedAttributeValue, (long) allAttributes.getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE))
                        ;
                if (Attributes.isCoreAttribute(attributeId)) {
                    // Core attributes must always be available from getInfo
                    Assert.assertEquals("Unexpected core attribute value from getInfo().",
                            expectedAttributeValue, (long) sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE));
                } else {
                    // Extended attributes may legitimately be absent from getInfo (not loaded in memory),
                    // in which case the NULL placeholder is acceptable; otherwise the value must match.
                    val extAttrValue = sp.getAttributes().getOrDefault(attributeId, Attributes.NULL_ATTRIBUTE_VALUE);
                    Assert.assertTrue("Unexpected extended attribute value from getInfo()",
                            extAttrValue == Attributes.NULL_ATTRIBUTE_VALUE || extAttrValue == expectedAttributeValue);
                }
            }
        }
    }
}
/**
 * Verifies that every segment's readable length matches the expected data, then performs a full read of each
 * segment (via checkSegmentReads), retrying on StreamSegmentNotExistsException as explained below.
 *
 * @param segmentContents Expected contents per segment.
 * @param store           The store to read from.
 */
private void checkReads(HashMap<String, ByteArrayOutputStream> segmentContents, StreamSegmentStore store) {
    for (Map.Entry<String, ByteArrayOutputStream> e : segmentContents.entrySet()) {
        String segmentName = e.getKey();
        byte[] expectedData = e.getValue().toByteArray();
        long segmentLength = store.getStreamSegmentInfo(segmentName, TIMEOUT).join().getLength();
        Assert.assertEquals("Unexpected Read Index length for segment " + segmentName, expectedData.length, segmentLength);
        AtomicLong expectedCurrentOffset = new AtomicLong(0);
        // We retry a number of times on StreamSegmentNotExists. It is possible that waitForSegmentsInStorage may have
        // returned successfully because it detected the Segment was complete there, but the internal callback to the
        // ReadIndex (completeMerge) may not yet have been executed. The ReadIndex has a mechanism to cope with this,
        // but it only retries once, after a fixed time interval, which is more than generous on any system.
        // However, on very slow systems, it is possible that that callback may take a significant amount of time to even
        // begin executing, hence trying to read data that was merged from a Transaction may result in a spurious
        // StreamSegmentNotExistsException.
        // This is gracefully handled by retries in AppendProcessor and/or Client, but in this case, we simply have to
        // do the retries ourselves, hoping that the callback eventually executes.
        Retry.withExpBackoff(100, 2, 10, TIMEOUT.toMillis() / 5)
                .retryWhen(ex -> Exceptions.unwrap(ex) instanceof StreamSegmentNotExistsException)
                .run(() -> {
                    checkSegmentReads(segmentName, expectedCurrentOffset, segmentLength, store, expectedData);
                    return null;
                });
    }
}
/**
 * Reads the given segment from expectedCurrentOffset to segmentLength and verifies every ReadResultEntry's
 * offset, type and data against the expected bytes.
 * Note: expectedCurrentOffset is advanced as entries are consumed, so a retrying caller (see checkReads)
 * resumes from where the previous attempt stopped rather than re-reading from the start.
 *
 * @param segmentName           The segment to read.
 * @param expectedCurrentOffset In/out: the offset to resume reading from; advanced per consumed entry.
 * @param segmentLength         The total segment length to read up to.
 * @param store                 The store to read from.
 * @param expectedData          The full expected segment contents (indexed by absolute offset).
 * @throws Exception If a read failed or timed out.
 */
private void checkSegmentReads(String segmentName, AtomicLong expectedCurrentOffset, long segmentLength, StreamSegmentStore store, byte[] expectedData) throws Exception {
    @Cleanup
    ReadResult readResult = store.read(segmentName, expectedCurrentOffset.get(), (int) (segmentLength - expectedCurrentOffset.get()), TIMEOUT).join();
    Assert.assertTrue("Empty read result for segment " + segmentName, readResult.hasNext());
    // A more thorough read check is done in StreamSegmentContainerTests; here we just check if the data was merged correctly.
    while (readResult.hasNext()) {
        ReadResultEntry readEntry = readResult.next();
        AssertExtensions.assertGreaterThan("getRequestedReadLength should be a positive integer for segment " + segmentName,
                0, readEntry.getRequestedReadLength());
        Assert.assertEquals("Unexpected value from getStreamSegmentOffset for segment " + segmentName,
                expectedCurrentOffset.get(), readEntry.getStreamSegmentOffset());
        // Entries may arrive without data pre-fetched; request it and wait before inspecting.
        if (!readEntry.getContent().isDone()) {
            readEntry.requestContent(TIMEOUT);
        }
        readEntry.getContent().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
        Assert.assertNotEquals("Unexpected value for isEndOfStreamSegment for non-sealed segment " + segmentName,
                ReadResultEntryType.EndOfStreamSegment, readEntry.getType());
        ReadResultEntryContents readEntryContents = readEntry.getContent().join();
        byte[] actualData = new byte[readEntryContents.getLength()];
        StreamHelpers.readAll(readEntryContents.getData(), actualData, 0, actualData.length);
        AssertExtensions.assertArrayEquals("Unexpected data read from segment " + segmentName + " at offset " + expectedCurrentOffset,
                expectedData, (int) expectedCurrentOffset.get(), actualData, 0, readEntryContents.getLength());
        expectedCurrentOffset.addAndGet(readEntryContents.getLength());
    }
    Assert.assertTrue("ReadResult was not closed post-full-consumption for segment" + segmentName, readResult.isClosed());
}
/**
 * Reads every segment end-to-end while interleaving truncations (every other consumed entry), verifying that
 * Truncated entries behave correctly: they throw from getContent(), terminate the current ReadResult, and a
 * re-read at the truncated offset yields a Truncated entry rather than throwing.
 *
 * @param segmentContents Expected contents per segment.
 * @param startOffsets    In/out: updated with each truncation offset applied here.
 * @param store           The store to read from and truncate on.
 * @throws Exception If a read failed or timed out.
 */
private void checkReadsWhileTruncating(HashMap<String, ByteArrayOutputStream> segmentContents, HashMap<String, Long> startOffsets,
                                       StreamSegmentStore store) throws Exception {
    for (Map.Entry<String, ByteArrayOutputStream> e : segmentContents.entrySet()) {
        String segmentName = e.getKey();
        byte[] expectedData = e.getValue().toByteArray();
        long segmentLength = store.getStreamSegmentInfo(segmentName, TIMEOUT).join().getLength();
        long expectedCurrentOffset = 0;
        boolean truncate = false;
        // Outer loop: re-issue reads until the whole segment has been consumed (a truncation mid-read
        // terminates the current ReadResult, so multiple reads may be needed).
        while (expectedCurrentOffset < segmentLength) {
            @Cleanup
            ReadResult readResult = store.read(segmentName, expectedCurrentOffset, (int) (segmentLength - expectedCurrentOffset), TIMEOUT).join();
            Assert.assertTrue("Empty read result for segment " + segmentName, readResult.hasNext());
            // We only test the truncation-related pieces here; other read-related checks are done in checkReads.
            while (readResult.hasNext()) {
                ReadResultEntry readEntry = readResult.next();
                Assert.assertEquals("Unexpected value from getStreamSegmentOffset for segment " + segmentName,
                        expectedCurrentOffset, readEntry.getStreamSegmentOffset());
                if (!readEntry.getContent().isDone()) {
                    readEntry.requestContent(TIMEOUT);
                }
                if (readEntry.getType() == ReadResultEntryType.Truncated) {
                    long startOffset = startOffsets.getOrDefault(segmentName, 0L);
                    // Verify that the Segment actually is truncated beyond this offset.
                    AssertExtensions.assertLessThan("Found Truncated ReadResultEntry but current offset not truncated.",
                            startOffset, readEntry.getStreamSegmentOffset());
                    // Verify the ReadResultEntry cannot be used and throws an appropriate exception.
                    AssertExtensions.assertSuppliedFutureThrows(
                            "ReadEntry.getContent() did not throw for a Truncated entry.",
                            readEntry::getContent,
                            ex -> ex instanceof StreamSegmentTruncatedException);
                    // Verify ReadResult is done.
                    Assert.assertFalse("Unexpected result from ReadResult.hasNext when encountering truncated entry.",
                            readResult.hasNext());
                    // Verify attempting to read at the current offset will return the appropriate entry (and not throw).
                    @Cleanup
                    ReadResult truncatedResult = store.read(segmentName, readEntry.getStreamSegmentOffset(), 1, TIMEOUT).join();
                    val first = truncatedResult.next();
                    Assert.assertEquals("Read request for a truncated offset did not start with a Truncated ReadResultEntryType.",
                            ReadResultEntryType.Truncated, first.getType());
                    // Skip over until the first non-truncated offset.
                    expectedCurrentOffset = Math.max(expectedCurrentOffset, startOffset);
                    continue;
                }
                // Non-truncated entry; do the usual verifications.
                readEntry.getContent().get(TIMEOUT.toMillis(), TimeUnit.MILLISECONDS);
                Assert.assertNotEquals("Unexpected value for isEndOfStreamSegment for non-sealed segment " + segmentName,
                        ReadResultEntryType.EndOfStreamSegment, readEntry.getType());
                ReadResultEntryContents readEntryContents = readEntry.getContent().join();
                byte[] actualData = new byte[readEntryContents.getLength()];
                StreamHelpers.readAll(readEntryContents.getData(), actualData, 0, actualData.length);
                AssertExtensions.assertArrayEquals("Unexpected data read from segment " + segmentName + " at offset " + expectedCurrentOffset,
                        expectedData, (int) expectedCurrentOffset, actualData, 0, readEntryContents.getLength());
                expectedCurrentOffset += readEntryContents.getLength();
                // Every other read, determine if we should truncate or not.
                if (truncate) {
                    long truncateOffset;
                    // Alternate truncation strategies by segment (hashCode parity) for wider coverage.
                    if (segmentName.hashCode() % 2 == 0) {
                        // Truncate just beyond the current read offset.
                        truncateOffset = Math.min(segmentLength, expectedCurrentOffset + 1);
                    } else {
                        // Truncate half of what we read so far.
                        truncateOffset = Math.min(segmentLength, expectedCurrentOffset / 2 + 1);
                    }
                    startOffsets.put(segmentName, truncateOffset);
                    store.truncateStreamSegment(segmentName, truncateOffset, TIMEOUT).join();
                }
                truncate = !truncate;
            }
            Assert.assertTrue("ReadResult was not closed post-full-consumption for segment" + segmentName, readResult.isClosed());
        }
    }
}
/**
 * Verifies that Storage (via the read-only store) agrees with the main store for every segment: deletion
 * status, seal status, length and contents. Handles segments that have been truncated (reads resume from the
 * start offset) and segments deleted from the main store (must also be absent from Storage).
 *
 * @param segmentContents     Expected contents per segment.
 * @param baseStore           The main store.
 * @param readOnlySegmentStore A read-only store that reflects Storage directly.
 * @throws Exception If a check failed unexpectedly.
 */
private static void checkStorage(HashMap<String, ByteArrayOutputStream> segmentContents, StreamSegmentStore baseStore,
                                 StreamSegmentStore readOnlySegmentStore) throws Exception {
    for (Map.Entry<String, ByteArrayOutputStream> e : segmentContents.entrySet()) {
        String segmentName = e.getKey();
        byte[] expectedData = e.getValue().toByteArray();
        // 1. Deletion status
        SegmentProperties sp = null;
        try {
            sp = baseStore.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        } catch (Exception ex) {
            // StreamSegmentNotExists means "deleted" (sp stays null); anything else is a real failure.
            if (!(Exceptions.unwrap(ex) instanceof StreamSegmentNotExistsException)) {
                throw ex;
            }
        }
        if (sp == null) {
            AssertExtensions.assertSuppliedFutureThrows(
                    "Segment is marked as deleted in SegmentStore but was not deleted in Storage " + segmentName,
                    () -> readOnlySegmentStore.getStreamSegmentInfo(segmentName, TIMEOUT),
                    ex -> ex instanceof StreamSegmentNotExistsException);
            // No need to do other checks.
            continue;
        }
        // 2. Seal Status
        SegmentProperties storageProps = readOnlySegmentStore.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        Assert.assertEquals("Segment seal status disagree between Store and Storage for segment " + segmentName,
                sp.isSealed(), storageProps.isSealed());
        // 3. Contents.
        SegmentProperties metadataProps = baseStore.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        Assert.assertEquals("Unexpected Storage length for segment " + segmentName, expectedData.length,
                storageProps.getLength());
        byte[] actualData = new byte[expectedData.length];
        int actualLength = 0;
        int expectedLength = actualData.length;
        try {
            // Optimistic path: read the whole segment from offset 0.
            @Cleanup
            ReadResult readResult = readOnlySegmentStore.read(segmentName, 0, actualData.length, TIMEOUT).join();
            actualLength = readResult.readRemaining(actualData, TIMEOUT);
        } catch (Exception ex) {
            ex = (Exception) Exceptions.unwrap(ex);
            if (!(ex instanceof StreamSegmentTruncatedException) || metadataProps.getStartOffset() == 0) {
                // We encountered an unexpected Exception, or a Truncated Segment which was not expected to be truncated.
                throw ex;
            }
            // Read from the truncated point, except if the whole segment got truncated.
            expectedLength = (int) (storageProps.getLength() - metadataProps.getStartOffset());
            if (metadataProps.getStartOffset() < storageProps.getLength()) {
                @Cleanup
                ReadResult readResult = readOnlySegmentStore.read(segmentName, metadataProps.getStartOffset(),
                        expectedLength, TIMEOUT).join();
                actualLength = readResult.readRemaining(actualData, TIMEOUT);
            }
        }
        Assert.assertEquals("Unexpected number of bytes read from Storage for segment " + segmentName,
                expectedLength, actualLength);
        // Compare against the tail of the expected data (the truncated prefix is not readable).
        AssertExtensions.assertArrayEquals("Unexpected data written to storage for segment " + segmentName,
                expectedData, expectedData.length - expectedLength, actualData, 0, expectedLength);
    }
}
/**
 * Releases our own reference to every append buffer, then asserts that no other component still holds
 * (leaks) a reference to any of them.
 */
private void checkAppendLeaks(ArrayList<ByteBuf> buffers) {
    // Release our reference to these buffers.
    for (ByteBuf buffer : buffers) {
        buffer.release();
    }
    // Then verify nobody else still holds such a reference.
    boolean allReleased = true;
    for (ByteBuf buffer : buffers) {
        allReleased = allReleased && buffer.refCnt() == 0;
    }
    Assert.assertTrue("Memory Leak: At least one append buffer did not have its data released.", allReleased);
}
/**
 * Waits for every given segment's current state (as reported by the base store) to be reflected in Storage
 * (as observed through the read-only store). Returns a future completing when all segments have caught up.
 */
private CompletableFuture<Void> waitForSegmentsInStorage(Collection<String> segmentNames, StreamSegmentStore baseStore,
                                                         StreamSegmentStore readOnlyStore) {
    val pending = new ArrayList<CompletableFuture<Void>>(segmentNames.size());
    for (String segmentName : segmentNames) {
        // Snapshot the expected state from the base store, then poll Storage until it matches.
        val expectedProperties = baseStore.getStreamSegmentInfo(segmentName, TIMEOUT).join();
        pending.add(waitForSegmentInStorage(expectedProperties, readOnlyStore));
    }
    return Futures.allOf(pending);
}
/**
 * Polls the read-only store (every 100ms, up to TIMEOUT) until Storage reflects the expected segment state:
 * for a sealed segment, until Storage reports it sealed; otherwise, until the Storage length matches.
 * Empty segments are skipped entirely. Fails with a TimeoutException if the deadline elapses.
 *
 * @param sp            The expected segment state (snapshot from the base store).
 * @param readOnlyStore The read-only store through which Storage is observed.
 * @return A future completing when Storage has caught up, or failing on timeout.
 */
private CompletableFuture<Void> waitForSegmentInStorage(SegmentProperties sp, StreamSegmentStore readOnlyStore) {
    if (sp.getLength() == 0) {
        // Empty segments may or may not exist in Storage, so don't bother complicating ourselves with this.
        return CompletableFuture.completedFuture(null);
    }
    TimeoutTimer timer = new TimeoutTimer(TIMEOUT);
    AtomicBoolean tryAgain = new AtomicBoolean(true);
    return Futures.loop(
            tryAgain::get,
            () -> Futures
                    // A not-yet-existing segment is treated as an empty placeholder (keeps polling).
                    .exceptionallyExpecting(readOnlyStore.getStreamSegmentInfo(sp.getName(), TIMEOUT),
                            ex -> ex instanceof StreamSegmentNotExistsException,
                            StreamSegmentInformation.builder().name(sp.getName()).build())
                    .thenCompose(storageProps -> {
                        if (sp.isSealed()) {
                            tryAgain.set(!storageProps.isSealed());
                        } else {
                            tryAgain.set(sp.getLength() != storageProps.getLength());
                        }
                        if (tryAgain.get() && !timer.hasRemaining()) {
                            return Futures.<Void>failedFuture(new TimeoutException(
                                    String.format("Segment %s did not complete in Storage in the allotted time.", sp.getName())));
                        } else {
                            // Back off briefly before the next poll (also runs once after success; harmless).
                            return Futures.delayedFuture(Duration.ofMillis(100), executorService());
                        }
                    }), executorService());
}
/**
 * Scales a test parameter by the fencing-test operation multiplier, rounding to the nearest int.
 */
private int applyFencingMultiplier(int originalValue) {
    return (int) Math.round(originalValue * getFencingTestOperationMultiplier());
}
//endregion
//region FencingTestContext
/**
 * Context for the Fencing test. Tracks every ServiceBuilder instance created during the test (so they can
 * all be closed at the end), exposes the currently-active store, and serializes asynchronous instance
 * creations so they run one at a time.
 */
private class FencingTestContext implements AutoCloseable {
    // Retry policy for instance creation: see createNewInstance() for why retries are needed.
    private final Retry.RetryAndThrowConditionally newInstanceRetry =
            Retry.withExpBackoff(20, 2, 20, TIMEOUT.toMillis() / 10)
                 .retryWhen(ex -> Exceptions.unwrap(ex) instanceof DataLogWriterNotPrimaryException);
    // The store belonging to the most recently created (i.e., surviving) instance.
    private final AtomicReference<StreamSegmentStore> activeStore = new AtomicReference<>();
    // Number of instances created so far.
    private final AtomicInteger iteration = new AtomicInteger();
    // Every builder ever created; all are closed together in close(). Older instances are deliberately
    // kept running so new ones must fence them out.
    private final ArrayList<ServiceBuilder> builders = new ArrayList<>();
    // Chain of async instance-creation completions; each createNewInstanceAsync() appends to it.
    private final AtomicReference<CompletableFuture<Void>> newInstanceCompletions = new AtomicReference<>(CompletableFuture.completedFuture(null));

    @Override
    public void close() {
        log.info("Stopping all instances.");
        this.builders.forEach(ServiceBuilder::close);
    }

    /**
     * Gets a pointer to the active StreamSegmentStore.
     */
    StreamSegmentStore getActiveStore() {
        return this.activeStore.get();
    }

    /**
     * Gets a value representing the current test iteration.
     */
    int getIteration() {
        return this.iteration.get();
    }

    /**
     * Gets a CompletableFuture that, when completed, will indicate that all calls to createNewInstanceAsync() so far
     * will have completed (successfully or not).
     */
    CompletableFuture<Void> awaitAllInitializations() {
        return this.newInstanceCompletions.get();
    }

    /**
     * Same as createNewInstance(), but runs asynchronously, and only after the previous initialization completed.
     */
    void createNewInstanceAsync() {
        this.newInstanceCompletions.set(
                this.newInstanceCompletions.get().thenRunAsync(this::createNewInstance, executorService()));
    }

    /**
     * Creates a new Segment Store Instance, with retries.
     * Normally we have the Controller coordinating which instances are the rightful survivors, however in this case
     * we need to simulate some of this behavior ourselves, by being insistent. It is possible that previous instances
     * meddle with the BKLog ZK metadata during the new instance's initialization, causing the new instance to wrongfully
     * assume it's not the rightful survivor. A quick retry solves this problem, as there is no other kind of information
     * available to disambiguate this.
     */
    void createNewInstance() {
        this.newInstanceRetry.run(() -> {
            int instanceId = getIteration() + 1;
            log.info("Starting Instance {}.", instanceId);
            ServiceBuilder b = createBuilder(instanceId);
            this.builders.add(b);
            this.activeStore.set(b.createStreamSegmentService());
            this.iteration.incrementAndGet();
            log.info("Instance {} Started.", instanceId);
            return null;
        });
    }
}
//endregion
/**
 * A deferred operation against a StreamSegmentStore: applying it to a store starts the operation and
 * returns its completion future. Deferring the store argument lets retries re-target a newer instance.
 */
@FunctionalInterface
private interface StoreRequest {
    CompletableFuture<Void> apply(StreamSegmentStore store);
}
}
|
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.labs.kitchensink;
import org.junit.Ignore;
import org.junit.Test;
import org.openqa.selenium.Keys;
/**
 * Automated functional tests for the "Lookups, etc" page of the KRAD Kitchen Sink: verifies quickfinder
 * lookups, AJAX field lookups and direct inquiries against the Travel Account demo data.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class LabsLookupsAft extends LabsKitchenSinkBase {

    /**
     * /kr-krad/uicomponents?viewId=UifCompView&pageId=UifCompView-Page6&lightbox=true
     */
    public static final String BOOKMARK_URL = "/kr-krad/uicomponents?viewId=UifCompView&pageId=UifCompView-Page6&lightbox=true";

    // XPath for the fancybox iframe that hosts lookup dialogs.
    private static final String IFRAME_XPATH="//iframe[@class='fancybox-iframe']";

    @Override
    protected String getBookmarkUrl() {
        return BOOKMARK_URL;
    }

    @Override
    protected void navigate() throws Exception {
        navigateToKitchenSink("Lookups, etc");
    }

    @Test
    @Ignore // https://jira.kuali.org/browse/RICEQA-434 AFT Failures in CI that pass locally
    public void testAjaxLookupBookmark() throws Exception {
        testAjaxLookup();
        passed();
    }

    @Test
    @Ignore // https://jira.kuali.org/browse/RICEQA-434 AFT Failures in CI that pass locally
    public void testAjaxLookupNav() throws Exception {
        testAjaxLookup();
        passed();
    }

    @Test
    public void testLookupsBookmark() throws Exception {
        testLookups();
        passed();
    }

    @Test
    public void testLookupsNav() throws Exception {
        testLookups();
        passed();
    }

    // this has been failing in CI
    // Types a partial account id into field79 and verifies the AJAX lookup resolves it to its account name.
    protected void testAjaxLookup() throws InterruptedException {
        clearTextByName("field79");
        waitAndTypeByName("field79", "a3");
        // Tab out of the field to trigger the AJAX lookup (previous blur-based approaches kept, commented, below).
        driver.switchTo().activeElement().sendKeys(Keys.TAB);
        //        fireEvent("field79", "blur");
        //        waitAndClickByName("field60"); // force blur on field79
        waitForTextPresent("Travel Account 3");
    }

    // Exercises blur-triggered lookups, two quickfinder (search-icon) lookups, and a direct inquiry.
    protected void testLookups() throws InterruptedException {
        // a2 link is now gone, bug or feature?
        //        waitAndClickByLinkText("a2");
        //        gotoLightBox();
        //        assertTextPresent(new String[] {"Travel Account Number:", "a2", "Travel Account Name:", "Travel Account 2", "Code And Description:", "EAT - Expense"});
        //        waitAndClickButtonByText("Close");
        // Blur-triggered lookup on field72.
        clearTextByName("field72");
        waitAndTypeByName("field72", "a2");
        fireEvent("field72", "blur");
        waitAndClickByName("field76"); // force blur on field72
        waitForTextPresent("Travel Account 2");
        // Blur-triggered lookup on field76.
        clearTextByName("field76");
        waitAndTypeByName("field76", "a1");
        fireEvent("field76", "blur");
        waitForTextPresent("Travel Account 1");
        // Quickfinder lookup: open the search dialog, search, and return the first value.
        waitAndClickByXpath("//a[@class='uif-actionLink icon-search']");
        gotoIframeByXpath(IFRAME_XPATH);
        waitAndClickButtonByText("Search");
        waitAndClickReturnValue();
        // Second quickfinder lookup, under the refreshLookups1 group.
        waitAndClickByXpath("//div[@data-parent='refreshLookups1']/div/span/a");
        gotoIframeByXpath(IFRAME_XPATH);
        waitAndClickButtonByText("Search");
        waitAndClickReturnValue();
        // Direct inquiry on field70: verify the inquiry lightbox shows the account's details.
        clearTextByName("field70");
        waitAndTypeByName("field70", "a1");
        waitAndClickByXpath("//input[@title='Direct Inquiry']");
        gotoLightBox();
        assertTextPresent(new String[] {"Travel Account Number:", "a1", "Travel Account Name:", "Travel Account 1", "Code And Description:", "IAT - Income"});
        waitAndClickButtonByText("Close");
    }
}
|
package de._125m125.kt.ktapi.smartCache;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Optional;
import org.junit.Before;
import org.junit.Test;
import de._125m125.kt.ktapi.smartCache.objects.TimestampedList;
/**
 * Unit tests for {@code CacheData}: an index-range cache storing contiguous slices of a list.
 * Uses a fixed, manually advanced clock so cache timestamps are deterministic.
 */
public class CacheDataTest {
    /**
     * Minimal manual {@link Clock}: starts at epoch millis 1000 and advances
     * by 1000 each time {@link #progress()} is called.
     */
    private final class ClockExtension extends Clock {
        private long current = 1000;

        @Override
        public ZoneId getZone() {
            return ZoneId.of("Z");
        }

        @Override
        public Clock withZone(final ZoneId zone) {
            // Not needed by these tests.
            throw new RuntimeException("This clock does not support Clock#whithZone()");
        }

        @Override
        public Instant instant() {
            return Instant.ofEpochMilli(this.current);
        }

        // Advances the clock by one second.
        public void progress() {
            this.current += 1000;
        }
    }

    private CacheData<String> uut;
    private ClockExtension testClock;

    @Before
    public void beforeCacheDataTest() {
        this.testClock = new ClockExtension();
        // CacheData is constructed at t=1000; the clock then moves to t=2000.
        this.uut = new CacheData<>(this.testClock);
        this.testClock.progress();
    }

    // Entries stored for a range should be returned with the creation timestamp and a cache-hit flag.
    @Test
    public void testSetAndGet_0To2() throws Exception {
        this.uut.set(Arrays.asList("a", "b"), 0, 2);
        final Optional<TimestampedList<String>> actual = this.uut.get(0, 2);
        assertEquals(Optional.of(new TimestampedList<>(Arrays.asList("a", "b"), 1000, true)), actual);
        assertTrue(actual.get().wasCacheHit());
        assertEquals(1000, actual.get().getTimestamp());
    }

    @Test
    public void testSetAndGet_2To4() throws Exception {
        this.uut.set(Arrays.asList("c", "d"), 2, 4);
        final Optional<TimestampedList<String>> actual = this.uut.get(2, 4);
        assertEquals(Optional.of(new TimestampedList<>(Arrays.asList("c", "d"), 1000, true)), actual);
        assertTrue(actual.get().wasCacheHit());
        assertEquals(1000, actual.get().getTimestamp());
    }

    @Test
    public void testSetAndGet_6To7() throws Exception {
        this.uut.set(Arrays.asList("g"), 6, 7);
        final Optional<TimestampedList<String>> actual = this.uut.get(6, 7);
        assertEquals(Optional.of(new TimestampedList<>(Arrays.asList("g"), 1000, true)), actual);
        assertTrue(actual.get().wasCacheHit());
        assertEquals(1000, actual.get().getTimestamp());
    }

    // Two adjacent stored ranges should be readable as one combined range.
    @Test
    public void testSetAndGet_4To5_5To6() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        this.uut.set(Arrays.asList("b"), 5, 6);
        final Optional<TimestampedList<String>> actual = this.uut.get(4, 6);
        assertEquals(Optional.of(new TimestampedList<>(Arrays.asList("a", "b"), 1000, true)), actual);
        assertTrue(actual.get().wasCacheHit());
        assertEquals(1000, actual.get().getTimestamp());
    }

    // Requests that fall even partially outside the cached range must miss.
    @Test
    public void testSetAndGet_missingBefore() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(3, 4));
    }

    @Test
    public void testSetAndGet_missingPartBefore() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(3, 5));
    }

    @Test
    public void testSetAndGet_missingAfter() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(5, 6));
    }

    @Test
    public void testSetAndGet_missingPartAfter() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(4, 6));
    }

    @Test
    public void testGet_empty() throws Exception {
        assertEquals(Optional.empty(), this.uut.get(0, 2));
    }

    // Single-index lookups.
    @Test
    public void testSetAndGet_single_6() throws Exception {
        this.uut.set(Arrays.asList("g"), 6, 7);
        assertEquals(Optional.of("g"), this.uut.get(6));
    }

    @Test
    public void testSetAndGet_single_missingBefore() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(3));
    }

    @Test
    public void testSetAndGet_single_missingAfter() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        assertEquals(Optional.empty(), this.uut.get(5));
    }

    // The list length must match the [start, end) range being stored.
    @Test(expected = IllegalArgumentException.class)
    public void testSet_failure_listTooShort() throws Exception {
        this.uut.set(Arrays.asList("c", "d"), 2, 5);
    }

    @Test(expected = ArrayIndexOutOfBoundsException.class)
    public void testSet_failure_negativeIndex() throws Exception {
        this.uut.set(Arrays.asList("c", "d"), -1, 1);
    }

    // Invalidation on an empty cache must not throw.
    @Test
    public void testInvalidate_empty() throws Exception {
        this.uut.invalidate();
    }

    @Test
    public void testInvalidate_invalidatesHit() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        this.uut.invalidate();
        assertEquals(Optional.empty(), this.uut.get(4, 5));
    }

    // Each invalidation records the clock's current time.
    @Test
    public void testInvalidate_changesTime() throws Exception {
        this.uut.invalidate();
        this.testClock.progress();
        assertEquals(2000L, this.uut.getLastInvalidationTime());
        this.uut.invalidate();
        assertEquals(3000L, this.uut.getLastInvalidationTime());
    }

    // getAll returns the concatenation of all contiguous cached ranges.
    @Test
    public void testGetAll() throws Exception {
        this.uut.set(Arrays.asList("a", "b"), 0, 2);
        this.uut.set(Arrays.asList("c"), 2, 3);
        final Optional<TimestampedList<String>> actual = this.uut.getAll();
        assertEquals(Optional.of(new TimestampedList<>(Arrays.asList("a", "b", "c"), 1000, true)), actual);
        assertTrue(actual.get().wasCacheHit());
        assertEquals(1000, actual.get().getTimestamp());
    }

    @Test
    public void testGetAll_empty() throws Exception {
        final Optional<TimestampedList<String>> actual = this.uut.getAll();
        assertEquals(Optional.empty(), actual);
    }

    @Test
    public void testInvalidate_invalidatesGetAll() throws Exception {
        this.uut.set(Arrays.asList("a"), 4, 5);
        this.uut.invalidate();
        assertEquals(Optional.empty(), this.uut.getAll());
    }

    // getAny returns some cached element matching a predicate, or empty.
    @Test
    public void testGetAny_empty() throws Exception {
        final Optional<String> actual = this.uut.getAny("c"::equals);
        assertEquals(Optional.empty(), actual);
    }

    @Test
    public void testGetAny_hit() throws Exception {
        this.uut.set(Arrays.asList("a", "b"), 0, 2);
        this.uut.set(Arrays.asList("c"), 2, 3);
        final Optional<String> actual = this.uut.getAny("c"::equals);
        assertEquals(Optional.of("c"), actual);
    }

    @Test
    public void testGetAny_multiple() throws Exception {
        this.uut.set(Arrays.asList("c", "c"), 0, 2);
        this.uut.set(Arrays.asList("c"), 2, 3);
        final Optional<String> actual = this.uut.getAny("c"::equals);
        assertEquals(Optional.of("c"), actual);
    }

    @Test
    public void testGetAny_miss() throws Exception {
        this.uut.set(Arrays.asList("a", "b"), 0, 2);
        this.uut.set(Arrays.asList("c"), 2, 3);
        final Optional<String> actual = this.uut.getAny("d"::equals);
        assertEquals(Optional.empty(), actual);
    }
}
|
package io.github.jhipster.registry.web.rest;
import com.codahale.metrics.annotation.Timed;
import io.github.jhipster.registry.web.rest.dto.UserDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.User;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import java.util.stream.Collectors;
/**
 * REST controller exposing information about the currently authenticated user.
 */
@RestController
@RequestMapping("/api")
public class AccountResource {

    private final Logger log = LoggerFactory.getLogger(AccountResource.class);

    /**
     * GET /authenticate : check if the user is authenticated, and return its login.
     *
     * @param request the HTTP request
     * @return the login if the user is authenticated, otherwise {@code null}
     */
    @GetMapping("/authenticate")
    @Timed
    public String isAuthenticated(HttpServletRequest request) {
        log.debug("REST request to check if the current user is authenticated");
        return request.getRemoteUser();
    }

    /**
     * GET /account : get the current user.
     *
     * @return the ResponseEntity with status 200 (OK) and the current user in body, or status 500 (Internal Server
     * Error) if the user couldn't be returned
     */
    @GetMapping("/account")
    @Timed
    public ResponseEntity<UserDTO> getAccount() {
        Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        // The principal is the Spring Security user populated at login time.
        User principal = (User) authentication.getPrincipal();
        UserDTO accountDto = new UserDTO(
            principal.getUsername(),
            principal.getAuthorities().stream()
                .map(grantedAuthority -> grantedAuthority.getAuthority())
                .collect(Collectors.toSet()));
        return ResponseEntity.ok(accountDto);
    }
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.tsfile.read.common;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.iotdb.tsfile.read.expression.IExpression;
import org.apache.iotdb.tsfile.read.expression.impl.BinaryExpression;
import org.apache.iotdb.tsfile.read.expression.impl.GlobalTimeExpression;
import org.apache.iotdb.tsfile.read.filter.TimeFilter;
/**
 * Closed interval [min,max] of long values, used to represent time ranges.
 *
 * Reference: http://www.java2s.com/Code/Java/Collections-Data-Structure/Anumericalinterval.htm
 *
 * Note: the primitive range is always the closed interval [min,max]; the
 * {@code leftClose}/{@code rightClose} flags are only adjusted by
 * {@link #getRemains(List)} to express half-open remainders.
 *
 * @author ryanm
 */
public class TimeRange implements Comparable<TimeRange> {

    /**
     * The lower value
     */
    private long min = 0;

    /**
     * The upper value
     */
    private long max = 0;

    /**
     * Initialize a closed interval [min,max].
     *
     * @param min the left endpoint of the closed interval
     * @param max the right endpoint of the closed interval
     */
    public TimeRange(long min, long max) {
        set(min, max);
    }

    /**
     * Orders ranges by start time, breaking ties by end time.
     *
     * @param r the range to compare against (must not be null)
     * @throws NullPointerException if {@code r} is null
     */
    @Override
    public int compareTo(TimeRange r) {
        if (r == null) {
            throw new NullPointerException("The input cannot be null!");
        }
        // Long.compare avoids the overflow that plain long subtraction can produce
        // for operands far apart (e.g. Long.MIN_VALUE vs. Long.MAX_VALUE).
        int minComparison = Long.compare(this.min, r.min);
        if (minComparison != 0) {
            return minComparison;
        }
        return Long.compare(this.max, r.max);
    }

    /**
     * @param min new lower bound; must be non-negative and not exceed the current max
     * @throws IllegalArgumentException if the bound is invalid
     */
    public void setMin(long min) {
        if (min < 0 || min > this.max) {
            throw new IllegalArgumentException("Invalid input!");
        }
        this.min = min;
    }

    /**
     * @param max new upper bound; must be non-negative and not precede the current min
     * @throws IllegalArgumentException if the bound is invalid
     */
    public void setMax(long max) {
        if (max < 0 || max < this.min) {
            throw new IllegalArgumentException("Invalid input!");
        }
        this.max = max;
    }

    /**
     * @return true if the given range lies in this range, inclusively
     */
    public boolean contains(TimeRange r) {
        return min <= r.min && max >= r.max;
    }

    /**
     * Set a closed interval [min,max].
     *
     * @param min the left endpoint of the closed interval
     * @param max the right endpoint of the closed interval
     * @throws IllegalArgumentException if {@code min > max}
     */
    public void set(long min, long max) {
        if (min > max) {
            throw new IllegalArgumentException("min should not be larger than max.");
        }
        this.min = min;
        this.max = max;
    }

    /**
     * @return The lower range boundary
     */
    public long getMin() {
        return min;
    }

    /**
     * @return The upper range boundary
     */
    public long getMax() {
        return max;
    }

    /**
     * Tests whether this range intersects the given range, taking the open/closed
     * flags into account. Two closed ranges within distance 1 of each other (e.g.
     * [1,3] and [4,5]) are deliberately treated as intersecting so that
     * {@link #sortAndMerge(List)} unions adjacent ranges.
     *
     * Here are some examples.
     *
     * [1,3] does not intersect with (4,5].
     *
     * [1,3) does not intersect with (3,5]
     *
     * [1,3] does not intersect with [5,6].
     *
     * [1,3] intersects with [2,5].
     *
     * [1,3] intersects with (3,5].
     *
     * [1,3) intersects with (2,5].
     *
     * @param r the given time range
     * @return true if the current time range intersects with the given time range r
     */
    private boolean intersects(TimeRange r) {
        if ((!leftClose || !r.rightClose) && (r.max < min)) {
            // e.g., [1,3] does not intersect with (4,5].
            return false;
        } else if (!leftClose && !r.rightClose && r.max <= min) {
            // e.g.,[1,3) does not intersect with (3,5]
            return false;
        } else if (leftClose && r.rightClose && r.max <= min - 2) {
            // e.g.,[1,3] does not intersect with [5,6].
            // BUG FIX: this branch previously returned true, contradicting the
            // comment and the symmetric "r.min >= max + 2" branch below.
            return false;
        } else if ((!rightClose || !r.leftClose) && (r.min > max)) {
            return false;
        } else if (!rightClose && r.leftClose && r.min >= max) {
            return false;
        } else if (rightClose && r.leftClose && r.min >= max + 2) {
            return false;
        } else {
            return true;
        }
    }

    @Override
    public String toString() {
        StringBuilder res = new StringBuilder();
        if (leftClose) {
            res.append("[ ");
        } else {
            res.append("( ");
        }
        res.append(min).append(" : ").append(max);
        if (rightClose) {
            res.append(" ]");
        } else {
            res.append(" )");
        }
        return res.toString();
    }

    // NOTE the primitive timeRange is always a closed interval [min,max] and
    // only in getRemains functions are leftClose and rightClose considered.
    private boolean leftClose = true; // default true
    private boolean rightClose = true; // default true

    private void setLeftClose(boolean leftClose) {
        this.leftClose = leftClose;
    }

    private void setRightClose(boolean rightClose) {
        this.rightClose = rightClose;
    }

    public boolean getLeftClose() {
        return leftClose;
    }

    public boolean getRightClose() {
        return rightClose;
    }

    /**
     * Return the union of the given time ranges.
     *
     * @param unionCandidates time ranges to be merged; sorted in place
     * @return the union of time ranges
     */
    public static List<TimeRange> sortAndMerge(List<TimeRange> unionCandidates) {
        // sort the time ranges in ascending order of the start time
        Collections.sort(unionCandidates);

        ArrayList<TimeRange> unionResult = new ArrayList<>();
        Iterator<TimeRange> iterator = unionCandidates.iterator();
        TimeRange rangeCurr;

        if (!iterator.hasNext()) {
            return unionResult;
        } else {
            rangeCurr = iterator.next();
        }

        while (iterator.hasNext()) {
            TimeRange rangeNext = iterator.next();
            if (rangeCurr.intersects(rangeNext)) {
                // extend the current range; adjacent closed ranges merge too
                rangeCurr.set(Math.min(rangeCurr.getMin(), rangeNext.getMin()),
                    Math.max(rangeCurr.getMax(), rangeNext.getMax()));
            } else {
                unionResult.add(rangeCurr);
                rangeCurr = rangeNext;
            }
        }
        unionResult.add(rangeCurr);
        return unionResult;
    }

    /**
     * Get the remaining time ranges in the current ranges but not in timeRangesPrev.
     *
     * NOTE the primitive timeRange is always a closed interval [min,max] and only in this function
     * are leftClose and rightClose changed. This method mutates {@code this}.
     *
     * @param timeRangesPrev time ranges union in ascending order of the start time
     * @return the remaining time ranges
     */
    public List<TimeRange> getRemains(List<TimeRange> timeRangesPrev) {
        List<TimeRange> remains = new ArrayList<>();

        for (TimeRange prev : timeRangesPrev) {
            // +2 is to keep consistent with the definition of `intersects` of two closed intervals
            if (prev.min >= max + 2) {
                // break early since timeRangesPrev is sorted
                break;
            }

            if (intersects(prev)) {
                if (prev.contains(this)) {
                    // e.g., this=[3,5], prev=[1,10]
                    // e.g., this=[3,5], prev=[3,5] Note that in this case, prev contains this and vice versa.
                    return remains;
                } else if (this.contains(prev)) {
                    if (prev.min > this.min && prev.max == this.max) {
                        // e.g., this=[1,6], prev=[3,6]
                        this.setMax(prev.min);
                        this.setRightClose(false);
                        remains.add(this);
                        // return the final result because timeRangesPrev is sorted
                        return remains;
                    } else if (prev.min == this.min) {
                        // Note prev.max < this.max
                        // e.g., this=[1,10], prev=[1,4]
                        min = prev.max;
                        leftClose = false;
                    } else {
                        // prev splits this into a finished left part and a continuing right part
                        // e.g., prev=[3,6], this=[1,10]
                        TimeRange r = new TimeRange(this.min, prev.min);
                        r.setLeftClose(this.leftClose);
                        r.setRightClose(false);
                        remains.add(r);
                        min = prev.max;
                        leftClose = false;
                    }
                } else {
                    // intersect without one containing the other
                    if (prev.min < this.min) {
                        // e.g., this=[3,10], prev=[1,6]
                        min = prev.max;
                        leftClose = false;
                    } else {
                        // e.g., this=[1,8], prev=[5,12]
                        this.setMax(prev.min);
                        this.setRightClose(false);
                        remains.add(this);
                        // return the final result because timeRangesPrev is sorted
                        return remains;
                    }
                }
            }
        }

        remains.add(this);
        return remains;
    }

    /**
     * Translates this range into a time-filter expression, honouring the
     * open/closed state of each endpoint.
     *
     * @return an AND of the lower-bound and upper-bound global time filters
     */
    public IExpression getExpression() {
        IExpression left;
        IExpression right;
        if (leftClose) {
            left = new GlobalTimeExpression(TimeFilter.gtEq(min));
        } else {
            left = new GlobalTimeExpression(TimeFilter.gt(min));
        }

        if (rightClose) {
            right = new GlobalTimeExpression(TimeFilter.ltEq(max));
        } else {
            right = new GlobalTimeExpression(TimeFilter.lt(max));
        }
        return BinaryExpression.and(left, right);
    }
}
|
package kr.co.popone.fitts.feature.push.order;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import kr.co.popone.fitts.feature.order.detail.OrderDetailActivity;
// Decompiled synthetic class: the anonymous OnClickListener created inside
// PaymentCancelMisPriceFragment#onViewCreated (originally Kotlin). Do not rename —
// the $-name matters to the decompiled companion code.
final class PaymentCancelMisPriceFragment$onViewCreated$2 implements OnClickListener {
    // Captured enclosing fragment instance.
    final /* synthetic */ PaymentCancelMisPriceFragment this$0;

    PaymentCancelMisPriceFragment$onViewCreated$2(PaymentCancelMisPriceFragment paymentCancelMisPriceFragment) {
        this.this$0 = paymentCancelMisPriceFragment;
    }

    // Opens the order-detail screen for the order id passed in the fragment's
    // arguments; does nothing when the fragment has no arguments.
    public final void onClick(View view) {
        Bundle arguments = this.this$0.getArguments();
        if (arguments != null) {
            OrderDetailActivity.Companion.start(this.this$0.getContext(), arguments.getLong(PaymentCancelMisPriceFragment.KEY_ORDER_ID));
        }
    }
}
|
package colin.web.homework.service;
import colin.web.homework.core.dao.decoratedao.NavManageDao;
import colin.web.homework.core.pojo.Homework_Nav_Manage_Entity;
import colin.web.homework.core.rowmapper.DefaultRowmapper;
import colin.web.homework.core.vo.HomeworkNavManageVo;
import colin.web.homework.tools.DateToolsUtils;
import colin.web.homework.tools.StringToolsUtils;
import org.apache.commons.beanutils.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Created by ASUS on 2015/12/27.
 *
 * Service for reading and maintaining navigation-menu entities and for
 * assembling the flat table rows into a parent/child navigation tree.
 * Entities whose {@code nav_parent_id} is the literal string "root" are
 * treated as top-level nodes.
 */
@Service
@Transactional
public class NavManageService {

    @Autowired
    private NavManageDao navManageDao;

    /**
     * Loads all navigation entities and assembles them into a tree of view objects.
     *
     * @return the root-level navigation nodes with their children populated, or
     *         {@code null} when no entities exist (kept for caller compatibility;
     *         callers are expected to null-check)
     */
    public List<HomeworkNavManageVo> fetchAllNavManage() {
        List<Homework_Nav_Manage_Entity> navManageEntities = navManageDao.fetchAllNavManageEntity();
        if (navManageEntities == null || navManageEntities.isEmpty()) {
            return null;
        }
        List<HomeworkNavManageVo> rootNavManageEntities = new ArrayList<HomeworkNavManageVo>(navManageEntities.size());
        List<HomeworkNavManageVo> subNavManageEntities = new ArrayList<HomeworkNavManageVo>(navManageEntities.size());
        for (Homework_Nav_Manage_Entity nav_manage_entity : navManageEntities) {
            HomeworkNavManageVo navManageVo = new HomeworkNavManageVo();
            try {
                BeanUtils.copyProperties(navManageVo, nav_manage_entity);
            } catch (IllegalAccessException | InvocationTargetException e) {
                // Best-effort copy: keep the original behavior of logging and
                // continuing with a partially populated VO.
                e.printStackTrace();
            }
            // Yoda comparison guards against a null nav_parent_id.
            if ("root".equals(nav_manage_entity.getNav_parent_id())) {
                rootNavManageEntities.add(navManageVo);
            } else {
                subNavManageEntities.add(navManageVo);
            }
        }
        for (HomeworkNavManageVo rootEntity : rootNavManageEntities) {
            rootEntity.setChildNavManageVoList(assembleNavManage(rootEntity, subNavManageEntities));
        }
        return rootNavManageEntities;
    }

    /**
     * Recursively collects the children of the given node from the flat list
     * of non-root entries.
     *
     * @param rootNavManageEntity  the node whose children are being resolved
     * @param subNavManageEntities all non-root navigation entries
     * @return the direct children of the node, each with its own subtree populated
     */
    public List<HomeworkNavManageVo> assembleNavManage(HomeworkNavManageVo rootNavManageEntity, List<HomeworkNavManageVo> subNavManageEntities) {
        List<HomeworkNavManageVo> childNavManageList = new ArrayList<HomeworkNavManageVo>();
        for (HomeworkNavManageVo subNavManageNav : subNavManageEntities) {
            if (rootNavManageEntity.getNav_id().equals(subNavManageNav.getNav_parent_id())) {
                subNavManageNav.setChildNavManageVoList(assembleNavManage(subNavManageNav, subNavManageEntities));
                childNavManageList.add(subNavManageNav);
            }
        }
        return childNavManageList;
    }

    /**
     * Creates a new navigation entity from the request parameters, assigning a
     * fresh UUID id and the current timestamp.
     */
    public void addNavManageEntity(Map<String, Object> params) {
        Homework_Nav_Manage_Entity navManageEntity = copyNavManageEntity(params);
        navManageEntity.setNav_id(StringToolsUtils.getCommonUUID());
        navManageEntity.setNav_createtime(DateToolsUtils.getTodayCurrentTime());
        navManageDao.addObjInfo(navManageEntity);
    }

    /** Deletes the navigation entity with the given id. */
    public void delNavManageEntity(String idVal) {
        navManageDao.deleteObjectById(Homework_Nav_Manage_Entity.class, idVal);
    }

    /** Updates a navigation entity from the request parameters. */
    public void updateMangeEntity(Map<String, Object> params) {
        navManageDao.updateObjInfo(copyNavManageEntity(params));
    }

    /** @return all top-level navigation entities (parent id "root"). */
    public List<Homework_Nav_Manage_Entity> fetchAppRootNavEntity() {
        Map<String, Object> params = new HashMap<String, Object>();
        params.put("nav_parent_id", "root");
        return navManageDao.seletcObjectByMap(Homework_Nav_Manage_Entity.class, params, new DefaultRowmapper<Homework_Nav_Manage_Entity>(Homework_Nav_Manage_Entity.class.getName()));
    }

    /**
     * Builds a navigation entity by copying matching keys from the given
     * parameter map (best effort — copy failures are logged and ignored).
     *
     * @param params request parameters keyed by entity property name
     * @return the populated (possibly partially populated) entity
     */
    public Homework_Nav_Manage_Entity copyNavManageEntity(Map<String, Object> params) {
        Homework_Nav_Manage_Entity navManageEntity = new Homework_Nav_Manage_Entity();
        try {
            BeanUtils.copyProperties(navManageEntity, params);
        } catch (IllegalAccessException | InvocationTargetException e) {
            e.printStackTrace();
        }
        return navManageEntity;
    }
}
|
package org.sv.flexobject.hadoop.mapreduce.input.mongo.oplog;
import org.sv.flexobject.hadoop.mapreduce.input.ConfiguredInputFormat;
import org.sv.flexobject.hadoop.mapreduce.input.InputConf;
import org.sv.flexobject.util.InstanceFactory;
/**
 * InputFormat for reading the MongoDB oplog; wires the oplog-specific
 * configuration into the generic {@code ConfiguredInputFormat} machinery.
 *
 * @param <K> key type produced by the input format
 * @param <V> value type produced by the input format
 */
public class OplogInputFormat<K,V> extends ConfiguredInputFormat<K,V> {
    /**
     * Supplies the configuration object for this input format via the
     * project's InstanceFactory (allows tests to substitute instances).
     *
     * NOTE(review): the declared return type {@code InputConf<InputConf>} looks
     * odd — presumably OplogInputConf extends it; verify against InputConf's
     * declaration before changing.
     */
    @Override
    protected InputConf<InputConf> makeInputConf() {
        return InstanceFactory.get(OplogInputConf.class);
    }
}
|
package colesico.framework.rpc.clientapi;
/**
 * RPC request handler called before the request is sent.
 * Can be used to enrich the request with custom values (security principal,
 * profile, tracing id, e.t.c.)
 *
 * <p>Single-abstract-method interface, so it can be implemented with a lambda
 * or method reference; {@code @FunctionalInterface} makes the compiler enforce
 * that contract.
 *
 * @param <Q> the request type handled by this callback
 */
@FunctionalInterface
public interface RpcRequestHandler<Q> {

    /**
     * Invoked with the outgoing request just before it is sent.
     *
     * @param request the request about to be sent; implementations may mutate it
     */
    void onRequest(Q request);
}
|
/*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.resources.util;
import com.sun.jini.constants.ThrowableConstants;
import org.rioproject.deploy.ServiceBeanInstantiationException;
import java.lang.reflect.InvocationTargetException;
/**
 * Utility for getting things from a Throwable
 *
 * @author Dennis Reedy
 */
@SuppressWarnings("PMD.AvoidThrowingRawExceptionTypes")
public class ThrowableUtil {

    /**
     * Walks to the deepest cause of the given Throwable.
     *
     * <p>InvocationTargetException and ServiceBeanInstantiationException get
     * special handling: the former unwraps to its target exception, the latter
     * is reconstructed from its serialized exception descriptor (using the
     * innermost cause descriptor when present).
     *
     * @param e the Throwable to unwrap; must not be null
     * @return the root cause, or {@code e} itself when it has no cause
     */
    public static Throwable getRootCause(Throwable e) {
        Throwable cause = e;
        if(e instanceof InvocationTargetException) {
            cause = e.getCause()==null? ((InvocationTargetException)e).getTargetException(): e.getCause();
        } else if(e instanceof ServiceBeanInstantiationException) {
            if(((ServiceBeanInstantiationException)e).getCauseExceptionDescriptor()!=null) {
                ServiceBeanInstantiationException.ExceptionDescriptor exDesc =
                    ((ServiceBeanInstantiationException)e).getCauseExceptionDescriptor();
                if(exDesc.getCauses().size()>0) {
                    exDesc = exDesc.getCauses().get(0);
                }
                Throwable t = new Throwable(exDesc.getMessage());
                t.setStackTrace(exDesc.getStacktrace());
                return t;
            }
        } else {
            // Walk the cause chain to its end. The t != cause guard prevents an
            // infinite loop when a Throwable lists itself as its own cause
            // (longer cause cycles are not detected — assumed not to occur).
            Throwable t = cause.getCause();
            while(t != null && t != cause) {
                cause = t;
                t = cause.getCause();
            }
        }
        return (cause);
    }

    /**
     * Determines whether the failure represented by the Throwable is worth
     * retrying: bad invocations/objects and connection refusals are terminal.
     *
     * @param t the Throwable to classify
     * @return true when a retry might succeed
     */
    public static boolean isRetryable(Throwable t) {
        boolean retryable = true;
        final int category = ThrowableConstants.retryable(t);
        Throwable cause = getRootCause(t);
        if (category == ThrowableConstants.BAD_INVOCATION ||
            category == ThrowableConstants.BAD_OBJECT ||
            cause instanceof java.net.ConnectException) {
            retryable = false;
        }
        return retryable;
    }
}
|
/* SPDX-License-Identifier: Apache 2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.accessservices.digitalarchitecture.outtopic;
import org.odpi.openmetadata.accessservices.digitalarchitecture.connectors.outtopic.DigitalArchitectureOutTopicServerConnector;
import org.odpi.openmetadata.accessservices.digitalarchitecture.converters.ElementHeaderConverter;
import org.odpi.openmetadata.accessservices.digitalarchitecture.events.DigitalArchitectureEventType;
import org.odpi.openmetadata.accessservices.digitalarchitecture.events.DigitalArchitectureOutTopicEvent;
import org.odpi.openmetadata.accessservices.digitalarchitecture.ffdc.DigitalArchitectureAuditCode;
import org.odpi.openmetadata.accessservices.digitalarchitecture.metadataelements.ElementHeader;
import org.odpi.openmetadata.frameworks.auditlog.AuditLog;
import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.properties.instances.Classification;
import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.properties.instances.EntityDetail;
import org.odpi.openmetadata.repositoryservices.connectors.stores.metadatacollectionstore.repositoryconnector.OMRSRepositoryHelper;
/**
 * DigitalArchitectureOutTopicPublisher is responsible for sending events on the IT Infrastructure OMAS's out topic.
 * It is called from the IT Infrastructure OMAS's OMRS Topic Listener.
 *
 * <p>The audit log passed to the constructor may be null; every logging call
 * is therefore guarded with a null check.
 */
public class DigitalArchitectureOutTopicPublisher
{
    private final DigitalArchitectureOutTopicServerConnector outTopicServerConnector;
    private final AuditLog                                   outTopicAuditLog;
    private final String                                     outTopicName;

    private final ElementHeaderConverter<ElementHeader> headerConverter;
    private final OMRSRepositoryHelper                  repositoryHelper;

    private final String actionDescription = "Out topic configuration refresh event publishing";

    /**
     * Constructor for the publisher.
     *
     * @param outTopicServerConnector connector to the out topic
     * @param outTopicName name of the out topic
     * @param outTopicAuditLog logging destination if anything goes wrong.
     * @param repositoryHelper helper object to parse entity/relationship objects
     * @param serviceName name of this component
     * @param serverName local server name
     */
    public DigitalArchitectureOutTopicPublisher(DigitalArchitectureOutTopicServerConnector outTopicServerConnector,
                                                String                                     outTopicName,
                                                AuditLog                                   outTopicAuditLog,
                                                OMRSRepositoryHelper                      repositoryHelper,
                                                String                                     serviceName,
                                                String                                     serverName)
    {
        this.outTopicServerConnector = outTopicServerConnector;
        this.outTopicAuditLog        = outTopicAuditLog;
        this.outTopicName            = outTopicName;
        this.repositoryHelper        = repositoryHelper;
        this.headerConverter         = new ElementHeaderConverter<>(repositoryHelper, serviceName, serverName);

        if (outTopicAuditLog != null)
        {
            outTopicAuditLog.logMessage(actionDescription, DigitalArchitectureAuditCode.SERVICE_PUBLISHING.getMessageDefinition(outTopicName));
        }
    }

    /**
     * Send the event to the embedded event bus connector(s).
     *
     * @param entity entity that is the subject of the event
     * @param eventType type of event
     */
    public void publishEntityEvent(EntityDetail                    entity,
                                   DigitalArchitectureEventType    eventType)
    {
        this.publishEntityEvent(eventType, entity, null, null, null);
    }

    /**
     * Send the event to the embedded event bus connector(s).
     *
     * @param entity entity that is the subject of the event
     * @param previousEntity previous version of the entity
     * @param eventType type of event
     * @param newClassification latest classification information (if the event relates to a classification)
     * @param previousClassification previous classification information (if the event relates to a classification)
     */
    public void publishEntityEvent(DigitalArchitectureEventType    eventType,
                                   EntityDetail                    entity,
                                   EntityDetail                    previousEntity,
                                   Classification                  newClassification,
                                   Classification                  previousClassification)
    {
        final String methodName = "publishEntityEvent";

        if (outTopicServerConnector != null)
        {
            DigitalArchitectureOutTopicEvent event = new DigitalArchitectureOutTopicEvent();

            try
            {
                event.setEventType(eventType);

                // Prefer the update time; fall back to the create time for new entities.
                if (entity.getUpdateTime() == null)
                {
                    event.setEventTime(entity.getCreateTime());
                }
                else
                {
                    event.setEventTime(entity.getUpdateTime());
                }

                event.setElementHeader(headerConverter.getNewBean(ElementHeader.class, entity, methodName));
                event.setElementProperties(repositoryHelper.getInstancePropertiesAsMap(entity.getProperties()));

                if (previousEntity != null)
                {
                    event.setPreviousElementHeader(headerConverter.getNewBean(ElementHeader.class, previousEntity, methodName));
                    event.setPreviousElementProperties(repositoryHelper.getInstancePropertiesAsMap(previousEntity.getProperties()));
                }

                if (newClassification != null)
                {
                    event.setClassificationName(newClassification.getName());
                }

                if (previousClassification != null)
                {
                    event.setClassificationName(previousClassification.getName());
                    event.setPreviousClassificationProperties(repositoryHelper.getInstancePropertiesAsMap(previousClassification.getProperties()));
                }

                outTopicServerConnector.sendEvent(event);
            }
            catch (Exception error)
            {
                // BUG FIX: guard against a null audit log (permitted by the
                // constructor) — previously this catch block could itself NPE.
                if (outTopicAuditLog != null)
                {
                    outTopicAuditLog.logException(methodName,
                                                  DigitalArchitectureAuditCode.PROCESS_EVENT_EXCEPTION.getMessageDefinition(event.toString(),
                                                                                                                            error.getClass().getName(),
                                                                                                                            error.getMessage()),
                                                  error);
                }
            }
        }
    }

    /**
     * Shutdown the publishing process.
     */
    public void disconnect()
    {
        try
        {
            // Consistent with publishEntityEvent: tolerate a null connector.
            if (outTopicServerConnector != null)
            {
                outTopicServerConnector.disconnect();
            }

            if (outTopicAuditLog != null)
            {
                outTopicAuditLog.logMessage(actionDescription, DigitalArchitectureAuditCode.PUBLISHING_SHUTDOWN.getMessageDefinition(outTopicName));
            }
        }
        catch (Exception error)
        {
            if (outTopicAuditLog != null)
            {
                outTopicAuditLog.logException(actionDescription,
                                              DigitalArchitectureAuditCode.PUBLISHING_SHUTDOWN_ERROR.getMessageDefinition(error.getClass().getName(),
                                                                                                                          outTopicName,
                                                                                                                          error.getMessage()),
                                              error);
            }
        }
    }
}
|
package com.polydes.scenelink.data;
import com.polydes.scenelink.SceneLinkExtension;
import com.polydes.scenelink.ui.MainPage;
/**
 * A link that targets another page within the SceneLink main view,
 * identified by the page id passed to the constructor.
 */
public class PageLink extends Link
{
    /** @param id id of the page this link points to */
    public PageLink(int id)
    {
        super(id);
    }

    /** Switches the main view to the linked page. */
    @Override
    public void open()
    {
        MainPage.get().switchToPage(id);
    }

    /** @return the model object of the linked page, resolved via the extension. */
    @Override
    public Object getModel()
    {
        return SceneLinkExtension.getPageModel(id);
    }
}
|
/*
* Copyright (c) 1997, 2001, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.basic;
import sun.awt.AppContext;
import javax.swing.*;
import java.awt.*;
import java.awt.event.*;
import javax.swing.plaf.*;
import java.io.Serializable;
/**
* CheckboxUI implementation for BasicCheckboxUI
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans<sup><font size="-2">TM</font></sup>
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @author Jeff Dinkins
*/
public class BasicCheckBoxUI extends BasicRadioButtonUI {

    /** Key under which the shared UI delegate is cached in each AppContext. */
    private static final Object BASIC_CHECK_BOX_UI_KEY = new Object();

    /** Prefix used to look up check box defaults from the UIManager. */
    private final static String propertyPrefix = "CheckBox" + ".";

    // ********************************
    //         Create PLAF
    // ********************************

    /**
     * Returns the shared {@code BasicCheckBoxUI} delegate for the calling
     * AppContext, creating and caching it on first use.  The delegate is
     * stateless, so a single instance serves every check box in the context.
     *
     * @param b the component the UI is requested for (not used)
     * @return the cached UI delegate
     */
    public static ComponentUI createUI(JComponent b) {
        AppContext context = AppContext.getAppContext();
        BasicCheckBoxUI ui = (BasicCheckBoxUI) context.get(BASIC_CHECK_BOX_UI_KEY);
        if (ui != null) {
            return ui;
        }
        ui = new BasicCheckBoxUI();
        context.put(BASIC_CHECK_BOX_UI_KEY, ui);
        return ui;
    }

    /** @return the property prefix ({@code "CheckBox."}) for UI defaults */
    public String getPropertyPrefix() {
        return propertyPrefix;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.proxy.backend.text.distsql.ral.common.queryable;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.shardingsphere.distsql.parser.statement.ral.common.queryable.ExportDatabaseConfigurationStatement;
import org.apache.shardingsphere.infra.config.algorithm.ShardingSphereAlgorithmConfiguration;
import org.apache.shardingsphere.infra.database.type.dialect.MySQLDatabaseType;
import org.apache.shardingsphere.infra.metadata.database.ShardingSphereDatabase;
import org.apache.shardingsphere.infra.metadata.database.schema.decorator.model.ShardingSphereColumn;
import org.apache.shardingsphere.infra.metadata.database.schema.decorator.model.ShardingSphereIndex;
import org.apache.shardingsphere.infra.metadata.database.schema.decorator.model.ShardingSphereSchema;
import org.apache.shardingsphere.infra.metadata.database.schema.decorator.model.ShardingSphereTable;
import org.apache.shardingsphere.mode.manager.ContextManager;
import org.apache.shardingsphere.proxy.backend.context.ProxyContext;
import org.apache.shardingsphere.proxy.backend.session.ConnectionSession;
import org.apache.shardingsphere.proxy.backend.text.distsql.ral.RALBackendHandler.HandlerParameter;
import org.apache.shardingsphere.proxy.backend.util.ProxyContextRestorer;
import org.apache.shardingsphere.sharding.api.config.ShardingRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.rule.ShardingTableRuleConfiguration;
import org.apache.shardingsphere.sharding.api.config.strategy.keygen.KeyGenerateStrategyConfiguration;
import org.apache.shardingsphere.sharding.api.config.strategy.sharding.NoneShardingStrategyConfiguration;
import org.apache.shardingsphere.sharding.api.config.strategy.sharding.StandardShardingStrategyConfiguration;
import org.apache.shardingsphere.sql.parser.sql.common.segment.generic.DatabaseSegment;
import org.apache.shardingsphere.sql.parser.sql.common.value.identifier.IdentifierValue;
import org.junit.Before;
import org.junit.Test;
import javax.sql.DataSource;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.RETURNS_DEEP_STUBS;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Tests the {@code EXPORT DATABASE CONFIGURATION} RAL handler.
 *
 * A fully mocked {@link ContextManager} exposing a single database named
 * "sharding_db" (one schema, two Hikari data sources, one sharding rule
 * configuration) is installed into the {@link ProxyContext}; the test then
 * checks that executing the statement yields exactly one row value.
 * {@code ProxyContextRestorer} restores the original proxy context afterwards.
 */
public final class ExportDatabaseConfigurationHandlerTest extends ProxyContextRestorer {
    // RETURNS_DEEP_STUBS lets each when(...) chain below stub nested getters
    // without mocking every intermediate object by hand.
    @Before
    public void init() {
        ContextManager contextManager = mock(ContextManager.class, RETURNS_DEEP_STUBS);
        ShardingSphereDatabase database = mock(ShardingSphereDatabase.class, RETURNS_DEEP_STUBS);
        when(database.getSchemas().get("sharding_db")).thenReturn(new ShardingSphereSchema(createTableMap()));
        when(database.getResource().getDataSources()).thenReturn(createDataSourceMap());
        when(database.getRuleMetaData().getConfigurations()).thenReturn(Collections.singletonList(createShardingRuleConfiguration()));
        when(contextManager.getMetaDataContexts().getMetaData().getDatabases()).thenReturn(Collections.singletonMap("sharding_db", database));
        ProxyContext.init(contextManager);
    }
    // Executes the export for "sharding_db" and asserts a single row value is returned.
    @Test
    public void assertExportDatabaseExecutor() throws SQLException {
        ExportDatabaseConfigurationHandler handler = new ExportDatabaseConfigurationHandler().init(createParameter(createSQLStatement(), mock(ConnectionSession.class)));
        handler.execute();
        handler.next();
        Collection<Object> data = new ArrayList<>(handler.getRowData());
        assertThat(data.size(), is(1));
    }
    // Sharding rule for t_order: inline database sharding on order_id, no table
    // sharding, a snowflake key generator, and a named (but empty) scaling entry.
    private ShardingRuleConfiguration createShardingRuleConfiguration() {
        ShardingRuleConfiguration result = new ShardingRuleConfiguration();
        result.getTables().add(createTableRuleConfiguration());
        result.setDefaultDatabaseShardingStrategy(new StandardShardingStrategyConfiguration("order_id", "ds_inline"));
        result.setDefaultTableShardingStrategy(new NoneShardingStrategyConfiguration());
        result.getKeyGenerators().put("snowflake", new ShardingSphereAlgorithmConfiguration("SNOWFLAKE", new Properties()));
        result.getShardingAlgorithms().put("ds_inline", new ShardingSphereAlgorithmConfiguration("INLINE", createProperties()));
        String scalingName = "default_scaling";
        result.setScalingName(scalingName);
        result.getScaling().put(scalingName, null);
        return result;
    }
    // Properties for the INLINE sharding algorithm declared above.
    private Properties createProperties() {
        Properties result = new Properties();
        result.setProperty("algorithm-expression", "ds_${order_id % 2}");
        return result;
    }
    // Two data sources keyed ds_0/ds_1; LinkedHashMap keeps the insertion order.
    private Map<String, DataSource> createDataSourceMap() {
        Map<String, DataSource> result = new LinkedHashMap<>(2, 1);
        result.put("ds_0", createDataSource("demo_ds_0"));
        result.put("ds_1", createDataSource("demo_ds_1"));
        return result;
    }
    // Hikari pool pointed at a local MySQL database.
    // NOTE(review): assumes the export is built from configuration only, without
    // opening a connection, so the URL need not be reachable - verify against handler.
    private DataSource createDataSource(final String dbName) {
        HikariDataSource result = new HikariDataSource();
        result.setJdbcUrl(String.format("jdbc:mysql://127.0.0.1:3306/%s?serverTimezone=UTC&useSSL=false", dbName));
        result.setUsername("root");
        result.setPassword("");
        result.setConnectionTimeout(30000L);
        result.setIdleTimeout(60000L);
        result.setMaxLifetime(1800000L);
        result.setMaximumPoolSize(50);
        result.setMinimumIdle(1);
        return result;
    }
    // Single table t_order with one column (order_id) and one index ("primary").
    private Map<String, ShardingSphereTable> createTableMap() {
        Collection<ShardingSphereColumn> columns = Collections.singletonList(new ShardingSphereColumn("order_id", 0, false, false, false));
        Collection<ShardingSphereIndex> indexes = Collections.singletonList(new ShardingSphereIndex("primary"));
        return Collections.singletonMap("t_order", new ShardingSphereTable("t_order", columns, indexes, Collections.emptyList()));
    }
    // t_order spread over ds_${0..1}.t_order_${0..1}, keys generated by "snowflake".
    private ShardingTableRuleConfiguration createTableRuleConfiguration() {
        ShardingTableRuleConfiguration result = new ShardingTableRuleConfiguration("t_order", "ds_${0..1}.t_order_${0..1}");
        result.setKeyGenerateStrategy(new KeyGenerateStrategyConfiguration("order_id", "snowflake"));
        return result;
    }
    // EXPORT DATABASE CONFIGURATION FROM sharding_db (no target file).
    private ExportDatabaseConfigurationStatement createSQLStatement() {
        return new ExportDatabaseConfigurationStatement(new DatabaseSegment(0, 0, new IdentifierValue("sharding_db")), null);
    }
    // Wraps the statement for the handler, using the MySQL dialect.
    private HandlerParameter<ExportDatabaseConfigurationStatement> createParameter(final ExportDatabaseConfigurationStatement statement, final ConnectionSession connectionSession) {
        return new HandlerParameter<>(statement, new MySQLDatabaseType(), connectionSession);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.