gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.androidx.car;
import android.app.Activity;
import android.content.Context;
import android.graphics.Point;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.view.WindowManager;
import android.widget.Toast;
import androidx.car.widget.ListItem;
import androidx.car.widget.ListItemAdapter;
import androidx.car.widget.ListItemProvider;
import androidx.car.widget.PagedListView;
import androidx.car.widget.TextListItem;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Demo activity for {@link ListItem}.
 *
 * <p>Shows a {@link PagedListView} populated with {@link TextListItem}s in a
 * variety of configurations (icons, actions, dividers, switches), and
 * demonstrates updating an item after construction.
 */
public class TextListItemActivity extends Activity {

    /**
     * Converts a raw pixel value to density-independent pixels (dp).
     *
     * @param context context used to look up the display density
     * @param pixels  value in raw pixels
     * @return equivalent value in dp, truncated to an int
     */
    private static int pixelToDip(Context context, int pixels) {
        return (int) (pixels / context.getResources().getDisplayMetrics().density);
    }

    // Assigned in onCreate(); package-private as in the original demo.
    PagedListView mPagedListView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_paged_list_view);
        mPagedListView = findViewById(R.id.paged_list_view);

        SampleProvider provider = new SampleProvider(this);
        ListItemAdapter adapter = new ListItemAdapter(this, provider,
                ListItemAdapter.BackgroundStyle.SOLID);

        final boolean[] hideDivider = {true};
        // Demonstrate how to update list item post construction.
        TextListItem toBeUpdated = new TextListItem(this);
        toBeUpdated.setPrimaryActionEmptyIcon();
        toBeUpdated.setTitle("tap next item to update my icon");
        toBeUpdated.setHideDivider(hideDivider[0]);
        provider.mItems.add(0, toBeUpdated);

        boolean[] useEmptyIcon = new boolean[]{false};
        TextListItem update = new TextListItem(this);
        update.setTitle("tap me to update the icon of item above");
        update.setOnClickListener(v -> {
            // Toggle the first item's icon between empty and the default app icon.
            if (useEmptyIcon[0]) {
                toBeUpdated.setPrimaryActionEmptyIcon();
            } else {
                toBeUpdated.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            }
            useEmptyIcon[0] = !useEmptyIcon[0];

            // Show/hide item divider.
            toBeUpdated.setHideDivider(hideDivider[0]);
            hideDivider[0] = !hideDivider[0];

            // Make sure to notify adapter about the change.
            adapter.notifyItemChanged(0);
        });
        provider.mItems.add(1, update);

        mPagedListView.setAdapter(adapter);
        mPagedListView.setMaxPages(PagedListView.UNLIMITED_PAGES);
        mPagedListView.setDividerVisibilityManager(adapter);
    }

    /** Supplies the fixed set of sample {@link ListItem}s shown by this demo. */
    private static class SampleProvider extends ListItemProvider {
        // All fields below are assigned exactly once, so they are final.
        private final Context mContext;

        // Package-visible because onCreate() inserts two extra items at the front.
        final List<ListItem> mItems;

        private final View.OnClickListener mOnClickListener = v ->
                Toast.makeText(mContext, "Clicked!", Toast.LENGTH_SHORT).show();

        // Shows the height of the containing card (two levels up the view tree) in dp.
        private final View.OnClickListener mGetParentHeight = v -> {
            int parentHeight = ((View) v.getParent().getParent()).getHeight();
            Toast.makeText(v.getContext(),
                    "card height is " + pixelToDip(mContext, parentHeight) + " dp",
                    Toast.LENGTH_SHORT).show();
        };

        private final ListItemProvider.ListProvider mListProvider;

        SampleProvider(Context context) {
            mContext = context;
            mItems = new ArrayList<>();
            TextListItem item;

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, true);
            item.setTitle("clickable single line with full icon and one action");
            item.setAction("card height", true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(mContext.getDrawable(R.drawable.pressed_icon), true);
            item.setTitle("primary action set by drawable");
            item.addViewBinder(vh -> vh.getPrimaryIcon().setClickable(true));
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            item.setTitle("clickable single line with small icon and clickable end icon");
            item.setSupplementalIcon(android.R.drawable.sym_def_app_icon, true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            item.setTitle("single line without a list divider");
            item.setHideDivider(true);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setOnClickListener(mOnClickListener);
            item.setPrimaryActionEmptyIcon();
            item.setTitle("clickable single line with empty icon and end icon no divider");
            item.setSupplementalIcon(android.R.drawable.sym_def_app_icon, false);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setTitle("title is single line and ellipsizes. "
                    + mContext.getString(R.string.long_text));
            item.setSupplementalIcon(android.R.drawable.sym_def_app_icon, true);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setTitle("Subtitle-like line without a list divider");
            item.setHideDivider(true);
            item.addViewBinder(viewHolder ->
                    viewHolder.getTitle().setTextAppearance(R.style.CarListSubtitle));
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionNoIcon();
            item.setTitle("single line with two actions and no divider");
            item.setActions("action 1", false,
                    v -> Toast.makeText(
                            v.getContext(), "action 1", Toast.LENGTH_SHORT).show(),
                    "action 2", false,
                    v -> Toast.makeText(
                            v.getContext(), "action 2", Toast.LENGTH_SHORT).show());
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionNoIcon();
            item.setTitle("single line with two actions and action 2 divider");
            item.setActions("action 1", false,
                    v -> Toast.makeText(
                            v.getContext(), "action 1", Toast.LENGTH_SHORT).show(),
                    "action 2", true,
                    v -> Toast.makeText(
                            v.getContext(), "action 2", Toast.LENGTH_SHORT).show());
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionNoIcon();
            item.setTitle("single line with divider between actions. "
                    + mContext.getString(R.string.long_text));
            item.setActions("action 1", true,
                    v -> Toast.makeText(
                            v.getContext(), "action 1", Toast.LENGTH_SHORT).show(),
                    "action 2", false,
                    v -> Toast.makeText(
                            v.getContext(), "action 2", Toast.LENGTH_SHORT).show());
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setTitle("item longer than containing View size");
            item.setBody(mContext.getResources().getString(R.string.super_long_text));
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, true);
            item.setTitle("double line with full icon and no end icon divider");
            item.setBody("one line text");
            item.setSupplementalIcon(android.R.drawable.sym_def_app_icon, false, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            item.setTitle("double line with small icon and one action");
            item.setBody("one line text");
            item.setAction("card height", true, mGetParentHeight);
            mItems.add(item);

            String tenChars = "Ten Chars.";
            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            item.setTitle("Card with small icon and text longer than limit");
            item.setBody(TextUtils.join("", Collections.nCopies(20, tenChars)));
            item.setSupplementalIcon(android.R.drawable.sym_def_app_icon, true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionEmptyIcon();
            item.setTitle("double line with empty primary icon."
                    + mContext.getString(R.string.long_text));
            item.setBody("one line text as primary", true);
            item.setActions("screen size", false, v -> {
                Context c = v.getContext();
                Point size = new Point();
                c.getSystemService(WindowManager.class).getDefaultDisplay().getSize(size);
                Toast.makeText(v.getContext(), String.format("%s x %s dp", pixelToDip(c, size.x),
                        pixelToDip(c, size.y)),
                        Toast.LENGTH_SHORT).show();
            }, "card height", true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setTitle("double line with no primary action and one divider");
            item.setBody("one line text as primary", true);
            item.setActions("screen size", false, v -> {
                Context c = v.getContext();
                Point size = new Point();
                c.getSystemService(WindowManager.class).getDefaultDisplay().getSize(size);
                Toast.makeText(v.getContext(),
                        String.format("%s x %s dp", pixelToDip(c, size.x),
                                pixelToDip(c, size.y)), Toast.LENGTH_SHORT).show();
            }, "card height", true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, true);
            item.setBody("Only body - no title is set");
            item.setAction("card height", true, mGetParentHeight);
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setPrimaryActionIcon(android.R.drawable.sym_def_app_icon, false);
            item.setBody("Only body - no title. " + mContext.getString(R.string.long_text));
            mItems.add(item);

            item = new TextListItem(mContext);
            item.setTitle("Switch - initially unchecked");
            item.setSwitch(false, true, (button, isChecked) -> {
                Toast.makeText(mContext,
                        isChecked ? "checked" : "unchecked", Toast.LENGTH_SHORT).show();
            });
            mItems.add(item);

            mListProvider = new ListItemProvider.ListProvider(mItems);
        }

        @Override
        public ListItem get(int position) {
            return mListProvider.get(position);
        }

        @Override
        public int size() {
            return mListProvider.size();
        }
    }
}
| |
package com.palmelf.eoffice.model.hrm;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.commons.lang.builder.ToStringBuilder;
import com.google.gson.annotations.Expose;
import com.palmelf.core.model.BaseModel;
import com.palmelf.eoffice.model.system.FileAttach;
@Entity
@Table(name = "user_contract")
public class UserContract extends BaseModel {

    private static final long serialVersionUID = 8337681620729350344L;

    @Expose
    private Long contractId;
    @Expose
    private Long userId;
    @Expose
    private String contractNo;
    @Expose
    private String fullname;
    @Expose
    private Integer status;
    @Expose
    private String timeLimit;
    @Expose
    private Integer isCompeted;
    @Expose
    private Integer isSecret;
    @Expose
    private String breakBurden;
    @Expose
    private String otherItems;
    @Expose
    private String contractType;
    @Expose
    private Date signDate;
    @Expose
    private Date startDate;
    @Expose
    private Date expireDate;
    @Expose
    private Set<FileAttach> contractAttachs = new HashSet<>();
    @Expose
    private Set<ContractEvent> contractEvents = new HashSet<>();

    /** Default constructor required by JPA. */
    public UserContract() {
    }

    /** Convenience constructor for referencing an existing contract by id. */
    public UserContract(Long in_contractId) {
        this.setContractId(in_contractId);
    }

    // Property accessors
    @Id
    @GeneratedValue
    @Column(name = "contractId", unique = true, nullable = false)
    public Long getContractId() {
        return this.contractId;
    }

    public void setContractId(Long contractId) {
        this.contractId = contractId;
    }

    @Column(name = "contractNo", nullable = false, length = 64)
    public String getContractNo() {
        return this.contractNo;
    }

    public void setContractNo(String contractNo) {
        this.contractNo = contractNo;
    }

    @Column(name = "fullname", length = 64)
    public String getFullname() {
        return this.fullname;
    }

    public void setFullname(String fullname) {
        this.fullname = fullname;
    }

    @Column(name = "userId")
    public Long getUserId() {
        return this.userId;
    }

    public void setUserId(Long userId) {
        this.userId = userId;
    }

    @Column(name = "status")
    public Integer getStatus() {
        return this.status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    @Column(name = "timeLimit", length = 64)
    public String getTimeLimit() {
        return this.timeLimit;
    }

    public void setTimeLimit(String timeLimit) {
        this.timeLimit = timeLimit;
    }

    @Column(name = "isCompeted")
    public Integer getIsCompeted() {
        return this.isCompeted;
    }

    public void setIsCompeted(Integer isCompeted) {
        this.isCompeted = isCompeted;
    }

    @Column(name = "isSecret")
    public Integer getIsSecret() {
        return this.isSecret;
    }

    public void setIsSecret(Integer isSecret) {
        this.isSecret = isSecret;
    }

    @Column(name = "breakBurden", length = 4000)
    public String getBreakBurden() {
        return this.breakBurden;
    }

    public void setBreakBurden(String breakBurden) {
        this.breakBurden = breakBurden;
    }

    @Column(name = "otherItems", length = 4000)
    public String getOtherItems() {
        return this.otherItems;
    }

    public void setOtherItems(String otherItems) {
        this.otherItems = otherItems;
    }

    @Column(name = "contractType", length = 64)
    public String getContractType() {
        return this.contractType;
    }

    public void setContractType(String contractType) {
        this.contractType = contractType;
    }

    @Column(name = "signDate", length = 19)
    public Date getSignDate() {
        return this.signDate;
    }

    public void setSignDate(Date signDate) {
        this.signDate = signDate;
    }

    @Column(name = "startDate", length = 19)
    public Date getStartDate() {
        return this.startDate;
    }

    public void setStartDate(Date startDate) {
        this.startDate = startDate;
    }

    @Column(name = "expireDate", length = 19)
    public Date getExpireDate() {
        return this.expireDate;
    }

    public void setExpireDate(Date expireDate) {
        this.expireDate = expireDate;
    }

    @ManyToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY)
    @JoinTable(name = "contract_attach", joinColumns = { @JoinColumn(name = "contractId", nullable = false, updatable = false) }, inverseJoinColumns = { @JoinColumn(name = "fileId", nullable = false, updatable = false) })
    public Set<FileAttach> getContractAttachs() {
        return this.contractAttachs;
    }

    public void setContractAttachs(Set<FileAttach> contractAttachs) {
        this.contractAttachs = contractAttachs;
    }

    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY, mappedBy = "userContract")
    public Set<ContractEvent> getContractEvents() {
        return this.contractEvents;
    }

    public void setContractEvents(Set<ContractEvent> contractEvents) {
        this.contractEvents = contractEvents;
    }

    // NOTE(review): equals/hashCode/toString below omit userId while including every
    // other persistent column — confirm this is intentional and not an oversight.
    @Override
    public boolean equals(Object object) {
        if (object == this) {
            // Fast path for self-comparison; avoids the field-by-field walk.
            return true;
        }
        if (!(object instanceof UserContract)) {
            return false;
        }
        UserContract rhs = (UserContract) object;
        return new EqualsBuilder().append(this.contractId, rhs.contractId).append(this.contractNo, rhs.contractNo)
                .append(this.fullname, rhs.fullname).append(this.status, rhs.status)
                .append(this.timeLimit, rhs.timeLimit).append(this.isCompeted, rhs.isCompeted)
                .append(this.isSecret, rhs.isSecret).append(this.breakBurden, rhs.breakBurden)
                .append(this.otherItems, rhs.otherItems).append(this.contractType, rhs.contractType)
                .append(this.signDate, rhs.signDate).append(this.startDate, rhs.startDate)
                .append(this.expireDate, rhs.expireDate).isEquals();
    }

    @Override
    public int hashCode() {
        // Seeds must stay stable: they are part of the persisted hash contract.
        return new HashCodeBuilder(-82280557, -700257973).append(this.contractId).append(this.contractNo)
                .append(this.fullname).append(this.status).append(this.timeLimit).append(this.isCompeted)
                .append(this.isSecret).append(this.breakBurden).append(this.otherItems).append(this.contractType)
                .append(this.signDate).append(this.startDate).append(this.expireDate).toHashCode();
    }

    @Override
    public String toString() {
        return new ToStringBuilder(this).append("contractId", this.contractId).append("contractNo", this.contractNo)
                .append("fullname", this.fullname).append("status", this.status).append("timeLimit", this.timeLimit)
                .append("isCompeted", this.isCompeted).append("isSecret", this.isSecret)
                .append("breakBurden", this.breakBurden).append("otherItems", this.otherItems)
                .append("contractType", this.contractType).append("signDate", this.signDate)
                .append("startDate", this.startDate).append("expireDate", this.expireDate).toString();
    }
}
| |
package io.hypertrack.factory;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import io.hypertrack.model.HyperTrackArrayList;
import io.hypertrack.model.HyperTrackModel;
import io.hypertrack.net.HyperTrackClient;
import io.hypertrack.net.HyperTrackException;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Abstract factory class for HyperTrack models.
 * Factories handle all communication between models and client.
 * Defines methods for create, retrieve, list models.
 *
 * @param <T> Class of model
 */
public abstract class HyperTrackFactory<T extends HyperTrackModel> {

    /** HyperTrack client object to send requests. */
    private HyperTrackClient client;

    /**
     * Instantiate factory class object by injecting client.
     *
     * @param client HyperTrack client object
     */
    protected HyperTrackFactory(final HyperTrackClient client) {
        this.client = client;
    }

    /**
     * Method to instantiate build HyperTrack model objects.
     *
     * @param params Properties of model object
     * @return HyperTrack model object
     */
    protected abstract T makeNew(Map<String, Object> params);

    /**
     * Return url endpoint for the model.
     *
     * @return API endpoint for model
     */
    protected abstract String getModelUrl();

    /**
     * Return url of model instance.
     *
     * @param instanceId ID of model instance
     * @return API endpoint for instance
     */
    public String getInstanceUrl(String instanceId) {
        return getModelUrl() + instanceId + "/";
    }

    /**
     * Retrieve model instance from client by instance id.
     *
     * <p>HyperTrackException is unchecked and propagates to the caller
     * unchanged; the former catch/printStackTrace/rethrow only duplicated
     * the stack trace on stderr.
     *
     * @param id ID of model instance
     * @return Model object with instance parameters
     */
    public T retrieve(String id) {
        return getAndCreateFromId(id);
    }

    /**
     * Create model instance with model parameters.
     *
     * @param params Map of instance parameters
     * @return Model instance with id and other parameters
     */
    public T create(Map<String, Object> params) {
        return postAndCreateFromParams(params);
    }

    /**
     * Retrieve list of model instances with optional filters.
     *
     * @param filters Filtering parameters for retrieval
     * @return Custom array list of model instances
     */
    public HyperTrackArrayList<T> list(Map<String, Object> filters) {
        return getListAndCreate(filters, null);
    }

    /**
     * Retrieve next page of instances list.
     *
     * @param list Paginated list object
     * @return Next page as a list, or null when there is no next page
     */
    public HyperTrackArrayList<T> getNextPage(HyperTrackArrayList<T> list) {
        Map<String, Object> filters = (Map) list.getNextFilters();
        return getPageFromFilters(filters);
    }

    /**
     * Retrieve previous page of instances list.
     *
     * @param list Paginated list object
     * @return Previous page as a list, or null when there is no previous page
     */
    public HyperTrackArrayList<T> getPreviousPage(HyperTrackArrayList<T> list) {
        Map<String, Object> filters = (Map) list.getPreviousFilters();
        return getPageFromFilters(filters);
    }

    /**
     * Retrieve page of a list request.
     *
     * @param filters GET params to identify page; null means no such page
     * @return Page with instances as list, or null when filters is null
     */
    protected HyperTrackArrayList<T> getPageFromFilters(Map<String, Object> filters) {
        if (filters == null) {
            return null;
        }
        return list(filters);
    }

    /**
     * Patches model instance and updates with new values.
     * For photo upload on User, use driverFactory.patchWithPhoto()
     *
     * @param model Model instance to be patched
     * @param params New params to be patched in instance
     */
    public void patch(T model, Map<String, Object> params) {
        String instanceId = model.getId();
        Map<String, Object> newParams = getPatchedParams(instanceId, params);
        updateFromParams(model, newParams);
    }

    /**
     * Delete model instance from server.
     *
     * <p>On success the model's properties are cleared to mark it stale.
     *
     * @param model Instance that is to be deleted
     */
    public void delete(T model) {
        String instanceId = model.getId();
        deleteById(instanceId);
        model.setProperties(null);
    }

    /**
     * Return model instance after retrieving it from client.
     *
     * @param id Id of instance to be retrieved
     * @return Model instance
     * @throws HyperTrackException
     */
    protected T getAndCreateFromId(String id) throws HyperTrackException {
        String response = client.request(getInstanceUrl(id), "GET", null);
        return createFromParams(getMapFromResponse(response));
    }

    /**
     * Return model instance after creating in at server.
     *
     * @param params Properties of object to be created
     * @return Model instance
     * @throws HyperTrackException
     */
    protected T postAndCreateFromParams(Map<String, Object> params) throws HyperTrackException {
        String response = client.request(getModelUrl(), "POST", params);
        return createFromParams(getMapFromResponse(response));
    }

    /**
     * Return list of model instances fetched from the server.
     *
     * @param filters Map to filter the list
     * @param customUrl If fetching list from custom endpoint, else model url is taken
     * @return List of fetched instances with pagination variables
     * @throws HyperTrackException
     */
    protected HyperTrackArrayList<T> getListAndCreate(Map<String, Object> filters, String customUrl) throws HyperTrackException {
        String url = (customUrl != null) ? customUrl : getModelUrl();
        String response = client.request(url, "GET", filters);
        Map<String, Object> responseMap = getMapFromResponse(response);

        // Gson deserializes JSON objects as LinkedTreeMap and numbers as Double.
        ArrayList<LinkedTreeMap> results = (ArrayList) responseMap.get("results");
        ArrayList<T> objectList = new ArrayList<>();
        for (LinkedTreeMap temp : results) {
            Map<String, Object> params = new HashMap<>(temp);
            objectList.add(createFromParams(params));
        }

        // NOTE(review): assumes "count" is always present on list responses —
        // a missing key would NPE here. Confirm against the API contract.
        Double countDouble = (Double) responseMap.get("count");
        Integer count = countDouble.intValue();

        String nextUrl = (String) responseMap.get("next");
        String previousUrl = (String) responseMap.get("previous");
        Map<String, String> nextParams = getParamsFromUrl(nextUrl);
        Map<String, String> previousParams = getParamsFromUrl(previousUrl);
        return new HyperTrackArrayList<>(count, previousParams, nextParams, objectList);
    }

    /**
     * Delete model instance by id.
     *
     * @param instanceId Id of instance to be deleted
     */
    protected void deleteById(String instanceId) throws HyperTrackException {
        client.request(getInstanceUrl(instanceId), "DELETE", null);
    }

    /**
     * Patch instance on the server and get new properties
     *
     * @param instanceId Id of instance to be patched.
     * @param params Properties to be patched.
     * @return Map of the instance's properties after the patch
     * @throws HyperTrackException
     */
    protected Map<String, Object> getPatchedParams(String instanceId, Map<String, Object> params) throws HyperTrackException {
        String response = client.request(getInstanceUrl(instanceId), "PATCH", params);
        return getMapFromResponse(response);
    }

    /**
     * Create new model instance from properties
     *
     * @param params Properties of new model instance
     * @return Model instance
     */
    private T createFromParams(Map<String, Object> params) {
        return makeNew(params);
    }

    /**
     * Convert response JSON string to POJO params map
     *
     * @param response Response string received from server
     * @return Map of properties
     */
    protected static Map<String, Object> getMapFromResponse(String response) {
        Gson gson = new Gson();
        Map<String, Object> map = new HashMap<String, Object>();
        map = (Map<String, Object>) gson.fromJson(response, map.getClass());
        return map;
    }

    /**
     * Set properties of model instance.
     *
     * @param model Model instance to be updated
     * @param params New properties for model instance
     */
    protected void updateFromParams(T model, Map<String, Object> params) {
        model.setProperties(params);
    }

    /**
     * Return client object
     *
     * @return HyperTrack client object
     */
    public HyperTrackClient getClient() {
        return client;
    }

    /**
     * Set client object as factory property
     *
     * @param client Instance of HyperTrack client
     */
    public void setClient(HyperTrackClient client) {
        this.client = client;
    }

    /**
     * Reads GET params from a url as POJO map
     *
     * @param url Url to be decoded; null means the page does not exist
     * @return Map of GET parameters, or null when url is null
     */
    private Map<String, String> getParamsFromUrl(String url) {
        // Check for null before allocating the result map.
        if (url == null) {
            return null;
        }
        Map<String, String> params = new HashMap<>();
        try {
            List<NameValuePair> rawParams = URLEncodedUtils.parse(new URI(url), "UTF-8");
            for (NameValuePair param : rawParams) {
                params.put(param.getName(), param.getValue());
            }
            return params;
        } catch (final URISyntaxException e) {
            throw new IllegalStateException("Invalid uri", e);
        }
    }
}
| |
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import android.os.AsyncTask;
import android.os.Handler;
import android.util.Log;
// This class provides functionality to:
// - synchronously load and register the native library. This is used by callers
//   that can't do anything useful without the native side.
// - asynchronously load and register the native library. This is used by callers
//   that can do more work in the java-side, and let a separate thread do all the
//   file IO and library loading.
public class LibraryLoader {
    private static final String TAG = "LibraryLoader";

    // Never reassigned, so declared final.
    private static final String LIBRARY = "chromeview";

    // Guarded by LibraryLoader.class monitor; set once the .so has been loaded.
    private static boolean sLoaded = false;

    // Only touched on the "main" thread (enforced by checkThreadUsage()).
    private static boolean sInitialized = false;

    private static AsyncTask<Void, Void, Boolean> sAsyncLoader;

    /**
     * Callback for handling loading of the native library.
     *
     * <p> The callback methods will always be triggered on the UI thread.
     */
    public static interface Callback {
        /**
         * Called when loading the native library is successful.
         */
        void onSuccess();

        /**
         * Called when loading the native library fails.
         */
        void onFailure();
    }

    /**
     * This method blocks until the library is fully loaded and initialized;
     * must be called on the thread that the native will call its "main" thread.
     */
    public static void loadAndInitSync() {
        checkThreadUsage();
        if (sInitialized) {
            // Already initialized, nothing to do.
            return;
        }
        if (sAsyncLoader != null) {
            // Async initialization in progress, wait.
            waitForAsyncInitialized();
            return;
        }
        loadNow();
        initializeOnMainThread();
    }

    /**
     * Block until the library is fully initialized.
     * Must be called on the thread that the native will call its "main" thread.
     */
    private static void waitForAsyncInitialized() {
        checkThreadUsage();
        if (sInitialized) {
            // Already initialized.
            return;
        }
        synchronized (LibraryLoader.class) {
            try {
                while (!sLoaded) {
                    LibraryLoader.class.wait();
                }
                // If the UI thread blocked waiting for the task it will already
                // have handled the library load completion, so don't duplicate that work here.
            } catch (InterruptedException e) {
                // Preserve the interrupt status so callers/frameworks can observe it.
                Thread.currentThread().interrupt();
            }
        }
        initializeOnMainThread();
    }

    /**
     * Kicks off an asynchronous library load, and will asynchronously initialize the
     * library when that completes.
     * Must be called on the thread that the native will call its "main" thread.
     */
    public static void loadAndInitAsync(final Callback onLoadedListener) {
        checkThreadUsage();
        if (sInitialized) {
            // Already initialized, post our Runnable if needed.
            if (onLoadedListener != null) {
                new Handler().post(new Runnable() {
                    @Override
                    public void run() {
                        onLoadedListener.onSuccess();
                    }
                });
            }
            return;
        }
        sAsyncLoader = new AsyncTask<Void, Void, Boolean>() {
            @Override
            protected Boolean doInBackground(Void... voids) {
                // We're loading the .so in a background thread. Potentially, this
                // can break native code that relies on static initializers using
                // thread local storage, as the library would normally load in the
                // main thread. If do we hit such cases we should remove those static
                // initializers, as we chrome has banned them.
                // (Worst case, we can go back to just warming up the file in the system
                // cache here and do the actual loading in onPostExecute().)
                return loadNow();
            }

            @Override
            protected void onPostExecute(Boolean result) {
                if (result) {
                    initializeOnMainThread();
                    if (onLoadedListener != null) onLoadedListener.onSuccess();
                } else {
                    if (onLoadedListener != null) onLoadedListener.onFailure();
                }
            }
        };
        sAsyncLoader.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    /**
     * @throws UnsatisfiedLinkError if the library is not yet initialized.
     */
    public static void checkIsReady() {
        if (!sInitialized) {
            throw new UnsatisfiedLinkError(LIBRARY + " is not initialized");
        }
    }

    /**
     * Loads the library and blocks until the load completes. The caller is responsible
     * for subsequently calling initialize().
     * May be called on any thread, but should only be called once. Note the thread
     * this is called on will be the thread that runs the native code's static initializers.
     * See the comment in doInBackground() for more considerations on this.
     *
     * @return Whether the native library was successfully loaded.
     */
    static boolean loadNow() {
        assert !sInitialized;
        try {
            Log.i(TAG, "loading: " + LIBRARY);
            System.loadLibrary(LIBRARY);
            Log.i(TAG, "loaded: " + LIBRARY);
            synchronized (LibraryLoader.class) {
                sLoaded = true;
                // Wake any thread blocked in waitForAsyncInitialized().
                LibraryLoader.class.notifyAll();
            }
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "error loading: " + LIBRARY, e);
            return false;
        }
        return true;
    }

    /**
     * initializes the library here and now: must be called on the thread that the
     * native will call its "main" thread. The library must have previously been
     * loaded with loadNow.
     * @param initCommandLine The command line arguments that native command line will
     *                        be initialized with.
     */
    static void initializeOnMainThread(String[] initCommandLine) {
        checkThreadUsage();
        if (sInitialized) {
            return;
        }
        if (!nativeLibraryLoadedOnMainThread(initCommandLine)) {
            Log.e(TAG, "error calling nativeLibraryLoadedOnMainThread");
            throw new UnsatisfiedLinkError();
        }
        // From this point on, native code is ready to use and checkIsReady()
        // shouldn't complain from now on (and in fact, it's used by the
        // following calls).
        sInitialized = true;
        CommandLine.enableNativeProxy();
        TraceEvent.setEnabledToMatchNative();
    }

    private static void initializeOnMainThread() {
        checkThreadUsage();
        if (!sInitialized) {
            initializeOnMainThread(CommandLine.getJavaSwitchesOrNull());
        }
    }

    private LibraryLoader() {
    }

    // The public API of this class is meant to be used from a single
    // thread. Internally, we may bounce to a separate thread to actually
    // load the library.
    private static Thread sMyThread;

    private static void checkThreadUsage() {
        Thread currentThread = java.lang.Thread.currentThread();
        if (sMyThread == null) {
            sMyThread = currentThread;
        } else {
            if (sMyThread != currentThread) {
                Log.e(TAG, "Threading violation detected. My thread=" + sMyThread +
                        " but I'm being accessed from thread=" + currentThread);
                assert false;
            }
        }
    }

    // This is the only method that is registered during System.loadLibrary, as it
    // happens on a different thread. We then call it on the main thread to register
    // everything else.
    private static native boolean nativeLibraryLoadedOnMainThread(String[] initCommandLine);
}
| |
/*
* Copyright (C) 2012-2014 The Project Lombok Authors.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package lombok.javac.handlers;
import static lombok.core.handlers.HandlerUtil.*;
import static lombok.javac.Javac.*;
import static lombok.javac.handlers.JavacHandlerUtil.*;
import java.util.Collection;
import lombok.AccessLevel;
import lombok.ConfigurationKeys;
import lombok.core.AST.Kind;
import lombok.core.AnnotationValues;
import lombok.experimental.Wither;
import lombok.javac.JavacAnnotationHandler;
import lombok.javac.JavacNode;
import lombok.javac.JavacTreeMaker;
import lombok.javac.handlers.JavacHandlerUtil.CopyJavadoc;
import lombok.javac.handlers.JavacHandlerUtil.FieldAccess;
import org.mangosdk.spi.ProviderFor;
import com.sun.tools.javac.code.Flags;
import com.sun.tools.javac.tree.JCTree.JCAnnotation;
import com.sun.tools.javac.tree.JCTree.JCBlock;
import com.sun.tools.javac.tree.JCTree.JCClassDecl;
import com.sun.tools.javac.tree.JCTree.JCConditional;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCMethodDecl;
import com.sun.tools.javac.tree.JCTree.JCNewClass;
import com.sun.tools.javac.tree.JCTree.JCReturn;
import com.sun.tools.javac.tree.JCTree.JCStatement;
import com.sun.tools.javac.tree.JCTree.JCTypeParameter;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.util.JCDiagnostic.DiagnosticPosition;
import com.sun.tools.javac.util.List;
import com.sun.tools.javac.util.ListBuffer;
import com.sun.tools.javac.util.Name;
/**
 * Handles the {@code lombok.experimental.Wither} annotation for javac.
 *
 * <p>For each eligible field {@code x} of type {@code T} this generates a method
 * {@code withX(T newValue)} that returns {@code this} when the new value equals the current
 * one (reference/primitive equality), and otherwise a new instance of the enclosing class
 * built from all other eligible fields plus the parameter.
 */
@ProviderFor(JavacAnnotationHandler.class)
public class HandleWither extends JavacAnnotationHandler<Wither> {
	/**
	 * Generates withers for every eligible instance field of the given type.
	 * Eligibility mirrors the loop below: no {@code $}-prefixed names, no static fields,
	 * and no final fields that already carry an initializer.
	 *
	 * @param typeNode The type to process; must be a class (interfaces, enums and
	 *        annotation types are rejected with an error).
	 * @param errorNode The node error messages are attached to.
	 * @param level Access level for the generated methods.
	 * @param checkForTypeLevelWither If true, bail out silently when the type itself is
	 *        annotated with {@code @Wither}, because that annotation triggers generation separately.
	 */
	public void generateWitherForType(JavacNode typeNode, JavacNode errorNode, AccessLevel level, boolean checkForTypeLevelWither) {
		if (checkForTypeLevelWither) {
			if (hasAnnotation(Wither.class, typeNode)) {
				//The annotation will make it happen, so we can skip it.
				return;
			}
		}
		JCClassDecl typeDecl = null;
		if (typeNode.get() instanceof JCClassDecl) typeDecl = (JCClassDecl) typeNode.get();
		long modifiers = typeDecl == null ? 0 : typeDecl.mods.flags;
		boolean notAClass = (modifiers & (Flags.INTERFACE | Flags.ANNOTATION | Flags.ENUM)) != 0;
		if (typeDecl == null || notAClass) {
			errorNode.addError("@Wither is only supported on a class or a field.");
			return;
		}
		for (JavacNode field : typeNode.down()) {
			if (field.getKind() != Kind.FIELD) continue;
			JCVariableDecl fieldDecl = (JCVariableDecl) field.get();
			//Skip fields that start with $
			if (fieldDecl.name.toString().startsWith("$")) continue;
			//Skip static fields.
			if ((fieldDecl.mods.flags & Flags.STATIC) != 0) continue;
			//Skip final initialized fields.
			if ((fieldDecl.mods.flags & Flags.FINAL) != 0 && fieldDecl.init != null) continue;
			generateWitherForField(field, errorNode.get(), level);
		}
	}
	/**
	 * Generates a wither on the stated field.
	 *
	 * Used by {@link HandleValue}.
	 *
	 * The difference between this call and the handle method is as follows:
	 *
	 * If there is a {@code lombok.experimental.Wither} annotation on the field, it is used and the
	 * same rules apply (e.g. warning if the method already exists, stated access level applies).
	 * If not, the wither is still generated if it isn't already there, though there will not
	 * be a warning if its already there. The default access level is used.
	 *
	 * @param fieldNode The node representing the field you want a wither for.
	 * @param pos The node responsible for generating the wither (the {@code @Value} or {@code @Wither} annotation).
	 * @param level Access level for the generated wither.
	 */
	public void generateWitherForField(JavacNode fieldNode, DiagnosticPosition pos, AccessLevel level) {
		if (hasAnnotation(Wither.class, fieldNode)) {
			//The annotation will make it happen, so we can skip it.
			return;
		}
		// whineIfExists=false: silently skip if a method with the wither's name already exists.
		createWitherForField(level, fieldNode, fieldNode, false, List.<JCAnnotation>nil(), List.<JCAnnotation>nil());
	}
	/**
	 * Entry point invoked by lombok for each explicit {@code @Wither} usage; dispatches to
	 * field-level or type-level generation depending on where the annotation sits.
	 */
	@Override public void handle(AnnotationValues<Wither> annotation, JCAnnotation ast, JavacNode annotationNode) {
		handleExperimentalFlagUsage(annotationNode, ConfigurationKeys.WITHER_FLAG_USAGE, "@Wither");
		Collection<JavacNode> fields = annotationNode.upFromAnnotationToFields();
		// Remove the annotation (and a now-possibly-unused import) from the generated AST.
		deleteAnnotationIfNeccessary(annotationNode, Wither.class);
		deleteImportFromCompilationUnit(annotationNode, "lombok.AccessLevel");
		JavacNode node = annotationNode.up();
		AccessLevel level = annotation.getInstance().value();
		// AccessLevel.NONE is the documented way to suppress generation entirely.
		if (level == AccessLevel.NONE || node == null) return;
		List<JCAnnotation> onMethod = unboxAndRemoveAnnotationParameter(ast, "onMethod", "@Wither(onMethod=", annotationNode);
		List<JCAnnotation> onParam = unboxAndRemoveAnnotationParameter(ast, "onParam", "@Wither(onParam=", annotationNode);
		switch (node.getKind()) {
		case FIELD:
			// Annotation sits on one or more field declarations: whine if the method already exists.
			createWitherForFields(level, fields, annotationNode, true, onMethod, onParam);
			break;
		case TYPE:
			// onMethod/onParam only make sense for a single generated method, not a whole type.
			if (!onMethod.isEmpty()) annotationNode.addError("'onMethod' is not supported for @Wither on a type.");
			if (!onParam.isEmpty()) annotationNode.addError("'onParam' is not supported for @Wither on a type.");
			generateWitherForType(node, annotationNode, level, false);
			break;
		}
	}
	/** Convenience loop: generates a wither for each field in {@code fieldNodes}. */
	public void createWitherForFields(AccessLevel level, Collection<JavacNode> fieldNodes, JavacNode errorNode, boolean whineIfExists, List<JCAnnotation> onMethod, List<JCAnnotation> onParam) {
		for (JavacNode fieldNode : fieldNodes) {
			createWitherForField(level, fieldNode, errorNode, whineIfExists, onMethod, onParam);
		}
	}
	/**
	 * Validates a single field and, if it qualifies, injects the generated wither into the
	 * enclosing type. Ineligible fields (static, final+initialized, {@code $}-prefixed, or
	 * ones whose name doesn't fit the {@code @Accessors} prefix list) produce a warning and
	 * are skipped; an existing user-written method of the same name/arity also aborts.
	 */
	public void createWitherForField(AccessLevel level, JavacNode fieldNode, JavacNode source, boolean whineIfExists, List<JCAnnotation> onMethod, List<JCAnnotation> onParam) {
		if (fieldNode.getKind() != Kind.FIELD) {
			fieldNode.addError("@Wither is only supported on a class or a field.");
			return;
		}
		JCVariableDecl fieldDecl = (JCVariableDecl)fieldNode.get();
		String methodName = toWitherName(fieldNode);
		if (methodName == null) {
			fieldNode.addWarning("Not generating wither for this field: It does not fit your @Accessors prefix list.");
			return;
		}
		if ((fieldDecl.mods.flags & Flags.STATIC) != 0) {
			fieldNode.addWarning("Not generating wither for this field: Withers cannot be generated for static fields.");
			return;
		}
		if ((fieldDecl.mods.flags & Flags.FINAL) != 0 && fieldDecl.init != null) {
			fieldNode.addWarning("Not generating wither for this field: Withers cannot be generated for final, initialized fields.");
			return;
		}
		if (fieldDecl.name.toString().startsWith("$")) {
			fieldNode.addWarning("Not generating wither for this field: Withers cannot be generated for fields starting with $.");
			return;
		}
		// Check every name the wither could take (e.g. with @Accessors prefixes applied)
		// against existing single-argument methods.
		for (String altName : toAllWitherNames(fieldNode)) {
			switch (methodExists(altName, fieldNode, false, 1)) {
			case EXISTS_BY_LOMBOK:
				return;
			case EXISTS_BY_USER:
				if (whineIfExists) {
					String altNameExpl = "";
					if (!altName.equals(methodName)) altNameExpl = String.format(" (%s)", altName);
					fieldNode.addWarning(
						String.format("Not generating %s(): A method with that name already exists%s", methodName, altNameExpl));
				}
				return;
			default:
			case NOT_EXISTS:
				//continue scanning the other alt names.
			}
		}
		long access = toJavacModifier(level);
		JCMethodDecl createdWither = createWither(access, fieldNode, fieldNode.getTreeMaker(), source, onMethod, onParam);
		injectMethod(fieldNode.up(), createdWither);
	}
	/**
	 * Builds the wither method AST:
	 * {@code withX(T x) { return this.x == x ? this : new Self(f1, ..., x, ..., fn); }}
	 * Non-null annotations on the field produce a null check before the return; the parameter
	 * carries copies of the field's nullability annotations plus any {@code onParam} annotations.
	 * Returns {@code null} when no wither name can be derived or the self type cannot be cloned.
	 */
	public JCMethodDecl createWither(long access, JavacNode field, JavacTreeMaker maker, JavacNode source, List<JCAnnotation> onMethod, List<JCAnnotation> onParam) {
		String witherName = toWitherName(field);
		if (witherName == null) return null;
		JCVariableDecl fieldDecl = (JCVariableDecl) field.get();
		ListBuffer<JCStatement> statements = new ListBuffer<JCStatement>();
		List<JCAnnotation> nonNulls = findAnnotations(field, NON_NULL_PATTERN);
		List<JCAnnotation> nullables = findAnnotations(field, NULLABLE_PATTERN);
		Name methodName = field.toName(witherName);
		List<JCAnnotation> annsOnParam = copyAnnotations(onParam).appendList(nonNulls).appendList(nullables);
		long flags = JavacHandlerUtil.addFinalIfNeeded(Flags.PARAMETER, field.getContext());
		JCVariableDecl param = maker.VarDef(maker.Modifiers(flags, annsOnParam), fieldDecl.name, fieldDecl.vartype, null);
		JCExpression selfType = cloneSelfType(field);
		if (selfType == null) return null;
		// Constructor arguments: every eligible sibling field in declaration order,
		// substituting the parameter for the field being "withed".
		ListBuffer<JCExpression> args = new ListBuffer<JCExpression>();
		for (JavacNode child : field.up().down()) {
			if (child.getKind() != Kind.FIELD) continue;
			JCVariableDecl childDecl = (JCVariableDecl) child.get();
			// Skip fields that start with $
			if (childDecl.name.toString().startsWith("$")) continue;
			long fieldFlags = childDecl.mods.flags;
			// Skip static fields.
			if ((fieldFlags & Flags.STATIC) != 0) continue;
			// Skip initialized final fields.
			if (((fieldFlags & Flags.FINAL) != 0) && childDecl.init != null) continue;
			if (child.get() == field.get()) {
				args.append(maker.Ident(fieldDecl.name));
			} else {
				args.append(createFieldAccessor(maker, child, FieldAccess.ALWAYS_FIELD));
			}
		}
		JCNewClass newClass = maker.NewClass(null, List.<JCExpression>nil(), selfType, args.toList(), null);
		// this.x == x ? this : new Self(...) — avoids allocation when nothing changes.
		JCExpression identityCheck = maker.Binary(CTC_EQUAL, createFieldAccessor(maker, field, FieldAccess.ALWAYS_FIELD), maker.Ident(fieldDecl.name));
		JCConditional conditional = maker.Conditional(identityCheck, maker.Ident(field.toName("this")), newClass);
		JCReturn returnStatement = maker.Return(conditional);
		if (nonNulls.isEmpty()) {
			statements.append(returnStatement);
		} else {
			JCStatement nullCheck = generateNullCheck(maker, field, source);
			if (nullCheck != null) statements.append(nullCheck);
			statements.append(returnStatement);
		}
		JCExpression returnType = cloneSelfType(field);
		JCBlock methodBody = maker.Block(0, statements.toList());
		List<JCTypeParameter> methodGenericParams = List.nil();
		List<JCVariableDecl> parameters = List.of(param);
		List<JCExpression> throwsClauses = List.nil();
		JCExpression annotationMethodDefaultValue = null;
		List<JCAnnotation> annsOnMethod = copyAnnotations(onMethod);
		if (isFieldDeprecated(field)) {
			// Propagate the field's deprecation onto the generated method.
			annsOnMethod = annsOnMethod.prepend(maker.Annotation(genJavaLangTypeRef(field, "Deprecated"), List.<JCExpression>nil()));
		}
		JCMethodDecl decl = recursiveSetGeneratedBy(maker.MethodDef(maker.Modifiers(access, annsOnMethod), methodName, returnType,
			methodGenericParams, parameters, throwsClauses, methodBody, annotationMethodDefaultValue), source.get(), field.getContext());
		copyJavadoc(field, decl, CopyJavadoc.WITHER);
		return decl;
	}
}
| |
/**
* Copyright 2011 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.core;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.math.BigInteger;
import java.util.Arrays;
import static com.google.common.base.Preconditions.checkState;
/**
 * <p>A Message is a data structure that can be serialized/deserialized using both the Bitcoin proprietary serialization
 * format and built-in Java object serialization. Specific types of messages that are used both in the block chain,
 * and on the wire, are derived from this class.</p>
 *
 * <p>Supports lazy parsing ({@code parseLazy}): the payload bytes are retained and only fully
 * parsed when a field is first accessed, and optional byte-array retention ({@code parseRetain})
 * so unmodified messages can be re-serialized without re-encoding.</p>
 */
public abstract class Message implements Serializable {
    private static final Logger log = LoggerFactory.getLogger(Message.class);
    private static final long serialVersionUID = -3561053461717079135L;
    // Upper bound for any length prefix read off the wire; guards against DoS-sized allocations.
    public static final int MAX_SIZE = 0x02000000; // 32MB
    public static final int UNKNOWN_LENGTH = Integer.MIN_VALUE;
    // Useful to ensure serialize/deserialize are consistent with each other.
    private static final boolean SELF_CHECK = false;
    // The offset is how many bytes into the provided byte array this message payload starts at.
    protected transient int offset;
    // The cursor keeps track of where we are in the byte array as we parse it.
    // Note that it's relative to the start of the array NOT the start of the message payload.
    protected transient int cursor;
    // Serialized payload length in bytes, or UNKNOWN_LENGTH until a parse establishes it.
    protected transient int length = UNKNOWN_LENGTH;
    // The raw message payload bytes themselves.
    protected transient byte[] payload;
    // True once parse() has run; with parseLazy this stays false until first field access.
    protected transient boolean parsed = false;
    // True when 'payload' was repopulated by a serialization rather than taken from the wire.
    protected transient boolean recached = false;
    protected transient final boolean parseLazy;
    protected transient final boolean parseRetain;
    protected transient int protocolVersion;
    // Wire checksum cached by BitcoinSerializer; see getChecksum()/setChecksum().
    protected transient byte[] checksum;
    // This will be saved by subclasses that implement Serializable.
    protected NetworkParameters params;
    /**
     * This exists for the Java serialization framework to use only.
     */
    protected Message() {
        parsed = true;
        parseLazy = false;
        parseRetain = false;
    }
    Message(NetworkParameters params) {
        this.params = params;
        parsed = true;
        parseLazy = false;
        parseRetain = false;
    }
    Message(NetworkParameters params, byte[] payload, int offset, int protocolVersion) throws ProtocolException {
        this(params, payload, offset, protocolVersion, false, false, UNKNOWN_LENGTH);
    }
    /**
     *
     * @param params NetworkParameters object.
     * @param payload Bitcoin protocol formatted byte array containing message content.
     * @param offset The location of the first payload byte within the array.
     * @param protocolVersion Bitcoin protocol version.
     * @param parseLazy Whether to perform a full parse immediately or delay until a read is requested.
     * @param parseRetain Whether to retain the backing byte array for quick reserialization.
     * If true and the backing byte array is invalidated due to modification of a field then
     * the cached bytes may be repopulated and retained if the message is serialized again in the future.
     * @param length The length of message payload if known. Usually this is provided when deserializing of the wire
     * as the length will be provided as part of the header. If unknown then set to Message.UNKNOWN_LENGTH
     * @throws ProtocolException
     */
    Message(NetworkParameters params, byte[] payload, int offset, int protocolVersion, boolean parseLazy, boolean parseRetain, int length) throws ProtocolException {
        this.parseLazy = parseLazy;
        this.parseRetain = parseRetain;
        this.protocolVersion = protocolVersion;
        this.params = params;
        this.payload = payload;
        this.cursor = this.offset = offset;
        this.length = length;
        if (parseLazy) {
            parseLite();
        } else {
            parseLite();
            parse();
            parsed = true;
        }
        // Enforce the parseLite()/parse() contract: 'length' must be known by now.
        if (this.length == UNKNOWN_LENGTH)
            checkState(false, "Length field has not been set in constructor for %s after %s parse. " +
                "Refer to Message.parseLite() for detail of required Length field contract.",
                getClass().getSimpleName(), parseLazy ? "lite" : "full");
        if (SELF_CHECK) {
            selfCheck(payload, offset);
        }
        if (parseRetain || !parsed)
            return;
        // Fully parsed and not retaining: drop the backing bytes so they can be GC'd.
        this.payload = null;
    }
    // Debug aid (enabled via SELF_CHECK): re-serializes the freshly parsed message and
    // verifies the bytes round-trip to exactly what was read off the wire.
    private void selfCheck(byte[] payload, int offset) {
        if (!(this instanceof VersionMessage)) {
            maybeParse();
            byte[] payloadBytes = new byte[cursor - offset];
            System.arraycopy(payload, offset, payloadBytes, 0, cursor - offset);
            byte[] reserialized = bitcoinSerialize();
            if (!Arrays.equals(reserialized, payloadBytes))
                throw new RuntimeException("Serialization is wrong: \n" +
                    Utils.HEX.encode(reserialized) + " vs \n" +
                    Utils.HEX.encode(payloadBytes));
        }
    }
    Message(NetworkParameters params, byte[] payload, int offset) throws ProtocolException {
        this(params, payload, offset, NetworkParameters.PROTOCOL_VERSION, false, false, UNKNOWN_LENGTH);
    }
    Message(NetworkParameters params, byte[] payload, int offset, boolean parseLazy, boolean parseRetain, int length) throws ProtocolException {
        this(params, payload, offset, NetworkParameters.PROTOCOL_VERSION, parseLazy, parseRetain, length);
    }
    // These methods handle the serialization/deserialization using the custom Bitcoin protocol.
    // It's somewhat painful to work with in Java, so some of these objects support a second
    // serialization mechanism - the standard Java serialization system. This is used when things
    // are serialized to the wallet.
    abstract void parse() throws ProtocolException;
    /**
     * Perform the most minimal parse possible to calculate the length of the message payload.
     * This is only required for subclasses of ChildMessage as root level messages will have their length passed
     * into the constructor.
     * <p/>
     * Implementations should adhere to the following contract: If parseLazy = true the 'length'
     * field must be set before returning. If parseLazy = false the length field must be set either
     * within the parseLite() method OR the parse() method. The overriding requirement is that length
     * must be set to non UNKNOWN_MESSAGE value by the time the constructor exits.
     *
     * @return
     * @throws ProtocolException
     */
    protected abstract void parseLite() throws ProtocolException;
    /**
     * Ensure the object is parsed if needed. This should be called in every getter before returning a value.
     * If the lazy parse flag is not set this is a method returns immediately.
     */
    protected synchronized void maybeParse() {
        if (parsed || payload == null)
            return;
        try {
            parse();
            parsed = true;
            if (!parseRetain)
                payload = null;
        } catch (ProtocolException e) {
            // Getters can't declare checked exceptions, so wrap in an unchecked one.
            throw new LazyParseException("ProtocolException caught during lazy parse. For safe access to fields call ensureParsed before attempting read or write access", e);
        }
    }
    /**
     * In lazy parsing mode access to getters and setters may throw an unchecked LazyParseException. If guaranteed safe access is required
     * this method will force parsing to occur immediately thus ensuring LazyParseExeption will never be thrown from this Message.
     * If the Message contains child messages (e.g. a Block containing Transaction messages) this will not force child messages to parse.
     * <p/>
     * This could be overidden for Transaction and it's child classes to ensure the entire tree of Message objects is parsed.
     *
     * @throws ProtocolException
     */
    public void ensureParsed() throws ProtocolException {
        try {
            maybeParse();
        } catch (LazyParseException e) {
            // Unwrap back to the checked exception callers of this method expect.
            if (e.getCause() instanceof ProtocolException)
                throw (ProtocolException) e.getCause();
            throw new ProtocolException(e);
        }
    }
    /**
     * To be called before any change of internal values including any setters. This ensures any cached byte array is
     * removed after performing a lazy parse if necessary to ensure the object is fully populated.
     * <p/>
     * Child messages of this object(e.g. Transactions belonging to a Block) will not have their internal byte caches
     * invalidated unless they are also modified internally.
     */
    protected void unCache() {
        maybeParse();
        checksum = null;
        payload = null;
        recached = false;
    }
    /**
     * Adjusts the cached 'length' after a child collection changed size.
     *
     * @param newArraySize the collection's new element count (used to recompute the VarInt
     *        count-prefix size; 0 means the prefix size did not change).
     * @param adjustment byte delta of the changed elements, or UNKNOWN_LENGTH to invalidate.
     */
    protected void adjustLength(int newArraySize, int adjustment) {
        if (length == UNKNOWN_LENGTH)
            return;
        // Our own length is now unknown if we have an unknown length adjustment.
        if (adjustment == UNKNOWN_LENGTH) {
            length = UNKNOWN_LENGTH;
            return;
        }
        length += adjustment;
        // Check if we will need more bytes to encode the length prefix.
        if (newArraySize == 1)
            length++; // The assumption here is we never call adjustLength with the same arraySize as before.
        else if (newArraySize != 0)
            length += VarInt.sizeOf(newArraySize) - VarInt.sizeOf(newArraySize - 1);
    }
    /**
     * used for unit testing
     */
    public boolean isParsed() {
        return parsed;
    }
    /**
     * used for unit testing
     */
    public boolean isCached() {
        return payload != null;
    }
    public boolean isRecached() {
        return recached;
    }
    /**
     * Should only used by BitcoinSerializer for cached checksum
     *
     * @return the checksum
     */
    byte[] getChecksum() {
        return checksum;
    }
    /**
     * Should only used by BitcoinSerializer for caching checksum
     *
     * @param checksum the checksum to set
     */
    void setChecksum(byte[] checksum) {
        if (checksum.length != 4)
            throw new IllegalArgumentException("Checksum length must be 4 bytes, actual length: " + checksum.length);
        this.checksum = checksum;
    }
    /**
     * Returns a copy of the array returned by {@link Message#unsafeBitcoinSerialize()}, which is safe to mutate.
     * If you need extra performance and can guarantee you won't write to the array, you can use the unsafe version.
     *
     * @return a freshly allocated serialized byte array
     */
    public byte[] bitcoinSerialize() {
        byte[] bytes = unsafeBitcoinSerialize();
        byte[] copy = new byte[bytes.length];
        System.arraycopy(bytes, 0, copy, 0, bytes.length);
        return copy;
    }
    /**
     * Serialize this message to a byte array that conforms to the bitcoin wire protocol.
     * <br/>
     * This method may return the original byte array used to construct this message if the
     * following conditions are met:
     * <ol>
     * <li>1) The message was parsed from a byte array with parseRetain = true</li>
     * <li>2) The message has not been modified</li>
     * <li>3) The array had an offset of 0 and no surplus bytes</li>
     * </ol>
     *
     * If condition 3 is not met then an copy of the relevant portion of the array will be returned.
     * Otherwise a full serialize will occur. For this reason you should only use this API if you can guarantee you
     * will treat the resulting array as read only.
     *
     * @return a byte array owned by this object, do NOT mutate it.
     */
    public byte[] unsafeBitcoinSerialize() {
        // 1st attempt to use a cached array.
        if (payload != null) {
            if (offset == 0 && length == payload.length) {
                // Cached byte array is the entire message with no extras so we can return as is and avoid an array
                // copy.
                return payload;
            }
            byte[] buf = new byte[length];
            System.arraycopy(payload, offset, buf, 0, length);
            return buf;
        }
        // No cached array available so serialize parts by stream.
        ByteArrayOutputStream stream = new UnsafeByteArrayOutputStream(length < 32 ? 32 : length + 32);
        try {
            bitcoinSerializeToStream(stream);
        } catch (IOException e) {
            // Cannot happen, we are serializing to a memory stream.
        }
        if (parseRetain) {
            // A free set of steak knives!
            // If there happens to be a call to this method we gain an opportunity to recache
            // the byte array and in this case it contains no bytes from parent messages.
            // This give a dual benefit. Releasing references to the larger byte array so that it
            // it is more likely to be GC'd. And preventing double serializations. E.g. calculating
            // merkle root calls this method. It is will frequently happen prior to serializing the block
            // which means another call to bitcoinSerialize is coming. If we didn't recache then internal
            // serialization would occur a 2nd time and every subsequent time the message is serialized.
            payload = stream.toByteArray();
            cursor = cursor - offset;
            offset = 0;
            recached = true;
            length = payload.length;
            return payload;
        }
        // Record length. If this Message wasn't parsed from a byte stream it won't have length field
        // set (except for static length message types). Setting it makes future streaming more efficient
        // because we can preallocate the ByteArrayOutputStream buffer and avoid resizing.
        byte[] buf = stream.toByteArray();
        length = buf.length;
        return buf;
    }
    /**
     * Serialize this message to the provided OutputStream using the bitcoin wire format.
     *
     * @param stream
     * @throws IOException
     */
    final public void bitcoinSerialize(OutputStream stream) throws IOException {
        // 1st check for cached bytes.
        if (payload != null && length != UNKNOWN_LENGTH) {
            stream.write(payload, offset, length);
            return;
        }
        bitcoinSerializeToStream(stream);
    }
    /**
     * Serializes this message to the provided stream. If you just want the raw bytes use bitcoinSerialize().
     */
    void bitcoinSerializeToStream(OutputStream stream) throws IOException {
        // Deliberately non-abstract: logs loudly instead of failing for subclasses that never serialize.
        log.error("Error: {} class has not implemented bitcoinSerializeToStream method. Generating message with no payload", getClass());
    }
    /**
     * This method is a NOP for all classes except Block and Transaction. It is only declared in Message
     * so BitcoinSerializer can avoid 2 instanceof checks + a casting.
     */
    public Sha256Hash getHash() {
        throw new UnsupportedOperationException();
    }
    /**
     * This should be overridden to extract correct message size in the case of lazy parsing. Until this method is
     * implemented in a subclass of ChildMessage lazy parsing may have no effect.
     *
     * This default implementation is a safe fall back that will ensure it returns a correct value by parsing the message.
     */
    public int getMessageSize() {
        if (length != UNKNOWN_LENGTH)
            return length;
        maybeParse();
        if (length == UNKNOWN_LENGTH)
            checkState(false, "Length field has not been set in %s after full parse.", getClass().getSimpleName());
        return length;
    }
    // Reads a little-endian uint32 at 'cursor' and advances the cursor by 4.
    long readUint32() throws ProtocolException {
        try {
            long u = Utils.readUint32(payload, cursor);
            cursor += 4;
            return u;
        } catch (ArrayIndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    // Reads a 32-byte hash at 'cursor' and advances the cursor by 32.
    Sha256Hash readHash() throws ProtocolException {
        try {
            byte[] hash = new byte[32];
            System.arraycopy(payload, cursor, hash, 0, 32);
            // We have to flip it around, as it's been read off the wire in little endian.
            // Not the most efficient way to do this but the clearest.
            hash = Utils.reverseBytes(hash);
            cursor += 32;
            return new Sha256Hash(hash);
        } catch (IndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    // Reads a little-endian int64 at 'cursor' and advances the cursor by 8.
    long readInt64() throws ProtocolException {
        try {
            long u = Utils.readInt64(payload, cursor);
            cursor += 8;
            return u;
        } catch (ArrayIndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    BigInteger readUint64() throws ProtocolException {
        try {
            // Java does not have an unsigned 64 bit type. So scrape it off the wire then flip.
            byte[] valbytes = new byte[8];
            System.arraycopy(payload, cursor, valbytes, 0, 8);
            valbytes = Utils.reverseBytes(valbytes);
            cursor += valbytes.length;
            return new BigInteger(valbytes);
        } catch (IndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    long readVarInt() throws ProtocolException {
        return readVarInt(0);
    }
    // Reads a Bitcoin VarInt located 'offset' bytes past the cursor; advances the cursor
    // past both the offset and the VarInt's encoded bytes.
    long readVarInt(int offset) throws ProtocolException {
        try {
            VarInt varint = new VarInt(payload, cursor + offset);
            cursor += offset + varint.getOriginalSizeInBytes();
            return varint.value;
        } catch (ArrayIndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    // Reads 'length' raw bytes at the cursor; rejects lengths above MAX_SIZE to avoid
    // attacker-controlled giant allocations.
    byte[] readBytes(int length) throws ProtocolException {
        if (length > MAX_SIZE) {
            throw new ProtocolException("Claimed byte array length too large: " + length);
        }
        try {
            byte[] b = new byte[length];
            System.arraycopy(payload, cursor, b, 0, length);
            cursor += length;
            return b;
        } catch (IndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    // Reads a VarInt length prefix followed by that many raw bytes.
    byte[] readByteArray() throws ProtocolException {
        long len = readVarInt();
        return readBytes((int)len);
    }
    // Reads a var_str: VarInt length prefix followed by that many UTF-8 bytes.
    String readStr() throws ProtocolException {
        try {
            VarInt varInt = new VarInt(payload, cursor);
            if (varInt.value == 0) {
                // Empty string: just skip the single 0x00 length byte.
                cursor += 1;
                return "";
            }
            cursor += varInt.getOriginalSizeInBytes();
            if (varInt.value > MAX_SIZE) {
                throw new ProtocolException("Claimed var_str length too large: " + varInt.value);
            }
            byte[] characters = new byte[(int) varInt.value];
            System.arraycopy(payload, cursor, characters, 0, characters.length);
            cursor += characters.length;
            try {
                return new String(characters, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                throw new RuntimeException(e); // Cannot happen, UTF-8 is always supported.
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            // NOTE(review): this catch is subsumed by the IndexOutOfBoundsException catch
            // below (AIOOBE is a subclass); both wrap identically, so it is redundant but harmless.
            throw new ProtocolException(e);
        } catch (IndexOutOfBoundsException e) {
            throw new ProtocolException(e);
        }
    }
    // True while unconsumed bytes remain in the backing array past the cursor.
    boolean hasMoreBytes() {
        return cursor < payload.length;
    }
    /** Network parameters this message was created with. */
    public NetworkParameters getParams() {
        return params;
    }
    /** Unchecked wrapper thrown when a lazy parse fails inside a getter; see {@link #ensureParsed()}. */
    public static class LazyParseException extends RuntimeException {
        private static final long serialVersionUID = 6971943053112975594L;
        public LazyParseException(String message, Throwable cause) {
            super(message, cause);
        }
        public LazyParseException(String message) {
            super(message);
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.diagnostic.PerformanceWatcher;
import com.intellij.ide.IdeEventQueue;
import com.intellij.mock.MockApplication;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.command.impl.StartMarkAction;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileSystemUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.codeStyle.CodeStyleSchemes;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.refactoring.rename.inplace.InplaceRefactoring;
import com.intellij.rt.execution.junit.FileComparisonFailure;
import com.intellij.testFramework.exceptionCases.AbstractExceptionCase;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.hash.HashMap;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.intellij.lang.annotations.RegExp;
import org.jdom.Element;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;
import sun.awt.AWTAutoShutdown;
import javax.swing.*;
import javax.swing.Timer;
import java.awt.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.security.SecureRandom;
import java.util.*;
import java.util.List;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/**
* @author peter
*/
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public abstract class UsefulTestCase extends TestCase {
  // True when tests run under TeamCity CI (detected via its standard environment variable).
  public static final boolean IS_UNDER_TEAMCITY = System.getenv("TEAMCITY_VERSION") != null;
  // NOTE(review): presumably probed on the classpath to detect a full IDEA environment — confirm at call sites.
  public static final String IDEA_MARKER_CLASS = "com.intellij.openapi.components.impl.stores.IdeaProjectStoreImpl";
  // Name prefix of the per-test temp directories created in setUp().
  public static final String TEMP_DIR_MARKER = "unitTest_";
  // NOTE(review): looks like a switch letting tests overwrite expected test data instead of failing — confirm.
  protected static boolean OVERWRITE_TESTDATA = false;
  // XML snapshot of pristine CodeInsightSettings, captured once in the static initializer below.
  private static final String DEFAULT_SETTINGS_EXTERNALIZED;
  // Randomness source for temp-dir name suffixes (see setUp()).
  private static final Random RNG = new SecureRandom();
  // The JVM temp directory as it was before any test redirected the canonical temp path.
  private static final String ORIGINAL_TEMP_DIR = FileUtil.getTempDirectory();
  // NOTE(review): presumably accumulate per-class setUp/tearDown timings for reporting — confirm usage.
  public static Map<String, Long> TOTAL_SETUP_COST_MILLIS = new HashMap<String, Long>();
  public static Map<String, Long> TOTAL_TEARDOWN_COST_MILLIS = new HashMap<String, Long>();
  // Root disposable for resources registered during a test; disposed in tearDown().
  protected final Disposable myTestRootDisposable = new Disposable() {
    @Override
    public void dispose() { }
    @Override
    public String toString() {
      // Identify the owning test (ClassName.testName) so leak reports are attributable.
      String testName = getTestName(false);
      return UsefulTestCase.this.getClass() + (StringUtil.isEmpty(testName) ? "" : ".test" + testName);
    }
  };
  // If non-null and located inside this test's temp dir, tearDown() preserves this one
  // path while deleting its siblings.
  protected static String ourPathToKeep = null;
  // NOTE(review): presumably a snapshot for restoring code style settings after the test — confirm where it is read.
  private CodeStyleSettings myOldCodeStyleSettings;
  // Per-test temp directory; assigned in setUp() when shouldContainTempFiles() is true.
  private String myTempDir;
  // NOTE(review): user-data key, apparently for tagging objects with their creation site — confirm usage.
  protected static final Key<String> CREATION_PLACE = Key.create("CREATION_PLACE");
  static {
    // Radar #5755208: Command line Java applications need a way to launch without a Dock icon.
    System.setProperty("apple.awt.UIElement", "true");
    try {
      // Serialize a pristine CodeInsightSettings instance once at class load.
      // NOTE(review): presumably compared against the current settings later to detect
      // tests that leak setting modifications — confirm where DEFAULT_SETTINGS_EXTERNALIZED is read.
      CodeInsightSettings defaultSettings = new CodeInsightSettings();
      Element oldS = new Element("temp");
      defaultSettings.writeExternal(oldS);
      DEFAULT_SETTINGS_EXTERNALIZED = JDOMUtil.writeElement(oldS, "\n");
    }
    catch (Exception e) {
      // A failure here would leave the final field unassigned; fail class loading loudly instead.
      throw new RuntimeException(e);
    }
  }
protected boolean shouldContainTempFiles() {
return true;
}
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    if (shouldContainTempFiles()) {
      // Redirect the canonical temp directory to a per-test folder named
      // "unitTest_<testName>_<random>" so tests cannot pollute each other's temp files.
      String testName = getTestName(true);
      if (StringUtil.isEmptyOrSpaces(testName)) testName = "";
      testName = new File(testName).getName(); // in case the test name contains file separators
      myTempDir = FileUtil.toSystemDependentName(ORIGINAL_TEMP_DIR + "/" + TEMP_DIR_MARKER + testName + "_"+ RNG.nextInt(1000));
      FileUtil.resetCanonicalTempPathCache(myTempDir);
    }
    // Propagate the performance-test flag so production code can skip expensive checks.
    ApplicationInfoImpl.setInPerformanceTest(isPerformanceTest());
  }
  @Override
  protected void tearDown() throws Exception {
    try {
      // Dispose the per-test root disposable first so registered cleanups run
      // before Swing/JDK bookkeeping is scrubbed and temp files are removed.
      Disposer.dispose(myTestRootDisposable);
      cleanupSwingDataStructures();
      cleanupDeleteOnExitHookList();
    }
    finally {
      if (shouldContainTempFiles()) {
        // Restore the original canonical temp path, then remove the per-test temp dir.
        FileUtil.resetCanonicalTempPathCache(ORIGINAL_TEMP_DIR);
        if (ourPathToKeep != null && FileUtil.isAncestor(myTempDir, ourPathToKeep, false)) {
          // A path inside the temp dir was flagged to survive: delete only its siblings.
          File[] files = new File(myTempDir).listFiles();
          if (files != null) {
            for (File file : files) {
              if (!FileUtil.pathsEqual(file.getPath(), ourPathToKeep)) {
                FileUtil.delete(file);
              }
            }
          }
        }
        else {
          FileUtil.delete(new File(myTempDir));
        }
      }
    }
    UIUtil.removeLeakingAppleListeners();
    super.tearDown();
  }
private static final Set<String> DELETE_ON_EXIT_HOOK_DOT_FILES;
private static final Class DELETE_ON_EXIT_HOOK_CLASS;
static {
Class<?> aClass;
try {
aClass = Class.forName("java.io.DeleteOnExitHook");
}
catch (Exception e) {
throw new RuntimeException(e);
}
Set<String> files = ReflectionUtil.getStaticFieldValue(aClass, Set.class, "files");
DELETE_ON_EXIT_HOOK_CLASS = aClass;
DELETE_ON_EXIT_HOOK_DOT_FILES = files;
}
  public static void cleanupDeleteOnExitHookList() throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException {
    // try to reduce file set retained by java.io.DeleteOnExitHook
    // Snapshot the set under the hook class's monitor (presumably matching
    // DeleteOnExitHook's own locking -- TODO confirm), then delete outside the lock
    // to keep the critical section small.
    List<String> list;
    synchronized (DELETE_ON_EXIT_HOOK_CLASS) {
      if (DELETE_ON_EXIT_HOOK_DOT_FILES.isEmpty()) return;
      list = new ArrayList<String>(DELETE_ON_EXIT_HOOK_DOT_FILES);
    }
    // Iterate back to front so later-registered entries are processed first.
    for (int i = list.size() - 1; i >= 0; i--) {
      String path = list.get(i);
      // Drop the hook entry when the file is already gone or was deleted successfully.
      if (FileSystemUtil.getAttributes(path) == null || new File(path).delete()) {
        synchronized (DELETE_ON_EXIT_HOOK_CLASS) {
          DELETE_ON_EXIT_HOOK_DOT_FILES.remove(path);
        }
      }
    }
  }
private static void cleanupSwingDataStructures() throws Exception {
Object manager = ReflectionUtil.getDeclaredMethod(Class.forName("javax.swing.KeyboardManager"), "getCurrentManager").invoke(null);
Map componentKeyStrokeMap = ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "componentKeyStrokeMap");
componentKeyStrokeMap.clear();
Map containerMap = ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "containerMap");
containerMap.clear();
}
  /**
   * Verifies that the test did not leave global code-insight/code-style settings modified.
   * Performance tests and mock/headless applications are exempt.
   *
   * @return a (possibly empty) composite of all detected settings-damage assertion errors
   */
  protected CompositeException checkForSettingsDamage() throws Exception {
    Application app = ApplicationManager.getApplication();
    if (isPerformanceTest() || app == null || app instanceof MockApplication) {
      return new CompositeException();
    }
    // Consume the snapshot taken by storeSettings() so a repeated call doesn't re-compare.
    CodeStyleSettings oldCodeStyleSettings = myOldCodeStyleSettings;
    myOldCodeStyleSettings = null;
    return doCheckForSettingsDamage(oldCodeStyleSettings, getCurrentCodeStyleSettings());
  }
public static CompositeException doCheckForSettingsDamage(@NotNull CodeStyleSettings oldCodeStyleSettings,
@NotNull CodeStyleSettings currentCodeStyleSettings) throws Exception {
CompositeException result = new CompositeException();
final CodeInsightSettings settings = CodeInsightSettings.getInstance();
try {
Element newS = new Element("temp");
settings.writeExternal(newS);
Assert.assertEquals("Code insight settings damaged", DEFAULT_SETTINGS_EXTERNALIZED, JDOMUtil.writeElement(newS, "\n"));
}
catch (AssertionError error) {
CodeInsightSettings clean = new CodeInsightSettings();
for (Field field : clean.getClass().getFields()) {
try {
ReflectionUtil.copyFieldValue(clean, settings, field);
}
catch (Exception ignored) {
}
}
result.add(error);
}
currentCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA);
try {
checkSettingsEqual(oldCodeStyleSettings, currentCodeStyleSettings, "Code style settings damaged");
}
catch (AssertionError e) {
result.add(e);
}
finally {
currentCodeStyleSettings.clearCodeStyleSettings();
}
try {
InplaceRefactoring.checkCleared();
}
catch (AssertionError e) {
result.add(e);
}
try {
StartMarkAction.checkCleared();
}
catch (AssertionError e) {
result.add(e);
}
return result;
}
protected void storeSettings() {
if (!isPerformanceTest() && ApplicationManager.getApplication() != null) {
myOldCodeStyleSettings = getCurrentCodeStyleSettings().clone();
myOldCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA);
}
}
protected CodeStyleSettings getCurrentCodeStyleSettings() {
if (CodeStyleSchemes.getInstance().getCurrentScheme() == null) return new CodeStyleSettings();
return CodeStyleSettingsManager.getInstance().getCurrentSettings();
}
public Disposable getTestRootDisposable() {
return myTestRootDisposable;
}
  @Override
  protected void runTest() throws Throwable {
    // Runs the JUnit test body via invokeTestRunnable (on the EDT by default) and
    // re-throws whatever it produced on the calling thread.
    final Throwable[] throwables = new Throwable[1];
    Runnable runnable = new Runnable() {
      @Override
      public void run() {
        try {
          UsefulTestCase.super.runTest();
        }
        catch (InvocationTargetException e) {
          // Unwrap to surface the test's real failure instead of the reflection wrapper.
          // NOTE(review): fillInStackTrace() mutates e, but e itself is discarded in
          // favor of the target exception -- confirm whether this call is intentional.
          e.fillInStackTrace();
          throwables[0] = e.getTargetException();
        }
        catch (IllegalAccessException e) {
          e.fillInStackTrace();
          throwables[0] = e;
        }
        catch (Throwable e) {
          throwables[0] = e;
        }
      }
    };
    invokeTestRunnable(runnable);
    if (throwables[0] != null) {
      throw throwables[0];
    }
  }
protected boolean shouldRunTest() {
return PlatformTestUtil.canRunTest(getClass());
}
public static void edt(Runnable r) {
UIUtil.invokeAndWaitIfNeeded(r);
}
protected void invokeTestRunnable(@NotNull Runnable runnable) throws Exception {
UIUtil.invokeAndWaitIfNeeded(runnable);
//runnable.run();
}
  /**
   * Replacement for JUnit's runBare(): times setUp/tearDown (accumulated per fixture
   * class) and always runs tearDown, preferring the test-body exception over a
   * tearDown exception when both occur.
   */
  protected void defaultRunBare() throws Throwable {
    Throwable exception = null;
    long setupStart = System.nanoTime();
    setUp();
    long setupCost = (System.nanoTime() - setupStart) / 1000000;
    logPerClassCost(setupCost, TOTAL_SETUP_COST_MILLIS);
    try {
      runTest();
    } catch (Throwable running) {
      exception = running;
    } finally {
      try {
        long teardownStart = System.nanoTime();
        tearDown();
        long teardownCost = (System.nanoTime() - teardownStart) / 1000000;
        logPerClassCost(teardownCost, TOTAL_TEARDOWN_COST_MILLIS);
      } catch (Throwable tearingDown) {
        // NOTE(review): a tearDown failure is silently dropped when the test body
        // already failed -- consider Throwable.addSuppressed once targeting Java 7+.
        if (exception == null) exception = tearingDown;
      }
    }
    if (exception != null) throw exception;
  }
/**
* Logs the setup cost grouped by test fixture class (superclass of the current test class).
*
* @param cost setup cost in milliseconds
*/
private void logPerClassCost(long cost, Map<String, Long> costMap) {
Class<?> superclass = getClass().getSuperclass();
Long oldCost = costMap.get(superclass.getName());
long newCost = oldCost == null ? cost : oldCost + cost;
costMap.put(superclass.getName(), newCost);
}
public static void logSetupTeardownCosts() {
long totalSetup = 0, totalTeardown = 0;
System.out.println("Setup costs");
for (Map.Entry<String, Long> entry : TOTAL_SETUP_COST_MILLIS.entrySet()) {
System.out.println(String.format(" %s: %d ms", entry.getKey(), entry.getValue()));
totalSetup += entry.getValue();
}
System.out.println("Teardown costs");
for (Map.Entry<String, Long> entry : TOTAL_TEARDOWN_COST_MILLIS.entrySet()) {
System.out.println(String.format(" %s: %d ms", entry.getKey(), entry.getValue()));
totalTeardown += entry.getValue();
}
System.out.println(String.format("Total overhead: setup %d ms, teardown %d ms", totalSetup, totalTeardown));
System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalSetupMs' value='%d']", totalSetup));
System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalTeardownMs' value='%d']", totalTeardown));
}
public static void replaceIdeEventQueueSafely() {
if (Toolkit.getDefaultToolkit().getSystemEventQueue() instanceof IdeEventQueue) {
return;
}
if (SwingUtilities.isEventDispatchThread()) {
throw new RuntimeException("must not call under EDT");
}
AWTAutoShutdown.getInstance().notifyThreadBusy(Thread.currentThread());
UIUtil.pump();
// in JDK 1.6 java.awt.EventQueue.push() causes slow painful death of current EDT
// so we have to wait through its agony to termination
try {
SwingUtilities.invokeAndWait(new Runnable() {
@Override
public void run() {
IdeEventQueue.getInstance();
}
});
SwingUtilities.invokeAndWait(EmptyRunnable.getInstance());
SwingUtilities.invokeAndWait(EmptyRunnable.getInstance());
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
@Override
public void runBare() throws Throwable {
if (!shouldRunTest()) return;
if (runInDispatchThread()) {
replaceIdeEventQueueSafely();
final Throwable[] exception = {null};
UIUtil.invokeAndWaitIfNeeded(new Runnable() {
@Override
public void run() {
try {
defaultRunBare();
}
catch (Throwable tearingDown) {
if (exception[0] == null) exception[0] = tearingDown;
}
}
});
if (exception[0] != null) throw exception[0];
}
else {
defaultRunBare();
}
}
protected boolean runInDispatchThread() {
return true;
}
@NonNls
public static String toString(Iterable<?> collection) {
if (!collection.iterator().hasNext()) {
return "<empty>";
}
final StringBuilder builder = new StringBuilder();
for (final Object o : collection) {
if (o instanceof THashSet) {
builder.append(new TreeSet<Object>((THashSet)o));
}
else {
builder.append(o);
}
builder.append("\n");
}
return builder.toString();
}
public static <T> void assertOrderedEquals(T[] actual, T... expected) {
assertOrderedEquals(Arrays.asList(actual), expected);
}
public static <T> void assertOrderedEquals(Iterable<T> actual, T... expected) {
assertOrderedEquals(null, actual, expected);
}
public static void assertOrderedEquals(@NotNull byte[] actual, @NotNull byte[] expected) {
assertEquals(actual.length, expected.length);
for (int i = 0; i < actual.length; i++) {
byte a = actual[i];
byte e = expected[i];
assertEquals("not equals at index: "+i, e, a);
}
}
public static void assertOrderedEquals(@NotNull int[] actual, @NotNull int[] expected) {
if (actual.length != expected.length) {
fail("Expected size: "+expected.length+"; actual: "+actual.length+"\nexpected: "+Arrays.toString(expected)+"\nactual : "+Arrays.toString(actual));
}
for (int i = 0; i < actual.length; i++) {
int a = actual[i];
int e = expected[i];
assertEquals("not equals at index: "+i, e, a);
}
}
public static <T> void assertOrderedEquals(final String errorMsg, @NotNull Iterable<T> actual, @NotNull T... expected) {
Assert.assertNotNull(actual);
Assert.assertNotNull(expected);
assertOrderedEquals(errorMsg, actual, Arrays.asList(expected));
}
public static <T> void assertOrderedEquals(final Iterable<? extends T> actual, final Collection<? extends T> expected) {
assertOrderedEquals(null, actual, expected);
}
public static <T> void assertOrderedEquals(final String erroMsg,
final Iterable<? extends T> actual,
final Collection<? extends T> expected) {
ArrayList<T> list = new ArrayList<T>();
for (T t : actual) {
list.add(t);
}
if (!list.equals(new ArrayList<T>(expected))) {
String expectedString = toString(expected);
String actualString = toString(actual);
Assert.assertEquals(erroMsg, expectedString, actualString);
Assert.fail("Warning! 'toString' does not reflect the difference.\nExpected: " + expectedString + "\nActual: " + actualString);
}
}
public static <T> void assertOrderedCollection(T[] collection, @NotNull Consumer<T>... checkers) {
Assert.assertNotNull(collection);
assertOrderedCollection(Arrays.asList(collection), checkers);
}
public static <T> void assertSameElements(T[] collection, T... expected) {
assertSameElements(Arrays.asList(collection), expected);
}
public static <T> void assertSameElements(Collection<? extends T> collection, T... expected) {
assertSameElements(collection, Arrays.asList(expected));
}
public static <T> void assertSameElements(Collection<? extends T> collection, Collection<T> expected) {
assertSameElements(null, collection, expected);
}
  /**
   * Asserts that both collections have the same size and the same distinct elements
   * (order-insensitive).
   * <p>
   * NOTE(review): the size check plus set equality is blind to differing duplicate
   * counts when the sizes happen to match (e.g. [a,a,b] vs [a,b,b] passes) --
   * confirm whether multiset equality is intended here.
   *
   * @param message    failure message prefix; may be null
   * @param collection actual elements
   * @param expected   expected elements
   */
  public static <T> void assertSameElements(String message, Collection<? extends T> collection, Collection<T> expected) {
    assertNotNull(collection);
    assertNotNull(expected);
    if (collection.size() != expected.size() || !new HashSet<T>(expected).equals(new HashSet<T>(collection))) {
      // Sorted line-per-element rendering first for a readable diff, then the raw sets.
      Assert.assertEquals(message, toString(expected, "\n"), toString(collection, "\n"));
      Assert.assertEquals(message, new HashSet<T>(expected), new HashSet<T>(collection));
    }
  }
public <T> void assertContainsOrdered(Collection<? extends T> collection, T... expected) {
assertContainsOrdered(collection, Arrays.asList(expected));
}
public <T> void assertContainsOrdered(Collection<? extends T> collection, Collection<T> expected) {
ArrayList<T> copy = new ArrayList<T>(collection);
copy.retainAll(expected);
assertOrderedEquals(toString(collection), copy, expected);
}
public <T> void assertContainsElements(Collection<? extends T> collection, T... expected) {
assertContainsElements(collection, Arrays.asList(expected));
}
public <T> void assertContainsElements(Collection<? extends T> collection, Collection<T> expected) {
ArrayList<T> copy = new ArrayList<T>(collection);
copy.retainAll(expected);
assertSameElements(toString(collection), copy, expected);
}
public static String toString(Object[] collection, String separator) {
return toString(Arrays.asList(collection), separator);
}
public <T> void assertDoesntContain(Collection<? extends T> collection, T... notExpected) {
assertDoesntContain(collection, Arrays.asList(notExpected));
}
public <T> void assertDoesntContain(Collection<? extends T> collection, Collection<T> notExpected) {
ArrayList<T> expected = new ArrayList<T>(collection);
expected.removeAll(notExpected);
assertSameElements(collection, expected);
}
public static String toString(Collection<?> collection, String separator) {
List<String> list = ContainerUtil.map2List(collection, new Function<Object, String>() {
@Override
public String fun(final Object o) {
return String.valueOf(o);
}
});
Collections.sort(list);
StringBuilder builder = new StringBuilder();
boolean flag = false;
for (final String o : list) {
if (flag) {
builder.append(separator);
}
builder.append(o);
flag = true;
}
return builder.toString();
}
public static <T> void assertOrderedCollection(Collection<? extends T> collection, Consumer<T>... checkers) {
Assert.assertNotNull(collection);
if (collection.size() != checkers.length) {
Assert.fail(toString(collection));
}
int i = 0;
for (final T actual : collection) {
try {
checkers[i].consume(actual);
}
catch (AssertionFailedError e) {
System.out.println(i + ": " + actual);
throw e;
}
i++;
}
}
public static <T> void assertUnorderedCollection(T[] collection, Consumer<T>... checkers) {
assertUnorderedCollection(Arrays.asList(collection), checkers);
}
public static <T> void assertUnorderedCollection(Collection<? extends T> collection, Consumer<T>... checkers) {
Assert.assertNotNull(collection);
if (collection.size() != checkers.length) {
Assert.fail(toString(collection));
}
Set<Consumer<T>> checkerSet = new HashSet<Consumer<T>>(Arrays.asList(checkers));
int i = 0;
Throwable lastError = null;
for (final T actual : collection) {
boolean flag = true;
for (final Consumer<T> condition : checkerSet) {
Throwable error = accepts(condition, actual);
if (error == null) {
checkerSet.remove(condition);
flag = false;
break;
}
else {
lastError = error;
}
}
if (flag) {
lastError.printStackTrace();
Assert.fail("Incorrect element(" + i + "): " + actual);
}
i++;
}
}
private static <T> Throwable accepts(final Consumer<T> condition, final T actual) {
try {
condition.consume(actual);
return null;
}
catch (Throwable e) {
return e;
}
}
@Contract("null, _ -> fail")
public static <T> T assertInstanceOf(Object o, Class<T> aClass) {
Assert.assertNotNull("Expected instance of: " + aClass.getName() + " actual: " + null, o);
Assert.assertTrue("Expected instance of: " + aClass.getName() + " actual: " + o.getClass().getName(), aClass.isInstance(o));
@SuppressWarnings("unchecked") T t = (T)o;
return t;
}
public static <T> T assertOneElement(Collection<T> collection) {
Assert.assertNotNull(collection);
Iterator<T> iterator = collection.iterator();
String toString = toString(collection);
Assert.assertTrue(toString, iterator.hasNext());
T t = iterator.next();
Assert.assertFalse(toString, iterator.hasNext());
return t;
}
public static <T> T assertOneElement(T[] ts) {
Assert.assertNotNull(ts);
Assert.assertEquals(Arrays.asList(ts).toString(), 1, ts.length);
return ts[0];
}
public static <T> void assertOneOf(T value, T... values) {
boolean found = false;
for (T v : values) {
if (value == v || value != null && value.equals(v)) {
found = true;
}
}
Assert.assertTrue(value + " should be equal to one of " + Arrays.toString(values), found);
}
public static void printThreadDump() {
PerformanceWatcher.dumpThreadsToConsole("Thread dump:");
}
public static void assertEmpty(final Object[] array) {
assertOrderedEquals(array);
}
public static void assertNotEmpty(final Collection<?> collection) {
if (collection == null) return;
assertTrue(!collection.isEmpty());
}
public static void assertEmpty(final Collection<?> collection) {
assertEmpty(collection.toString(), collection);
}
public static void assertNullOrEmpty(final Collection<?> collection) {
if (collection == null) return;
assertEmpty(null, collection);
}
public static void assertEmpty(final String s) {
assertTrue(s, StringUtil.isEmpty(s));
}
public static <T> void assertEmpty(final String errorMsg, final Collection<T> collection) {
assertOrderedEquals(errorMsg, collection);
}
public static void assertSize(int expectedSize, final Object[] array) {
assertEquals(toString(Arrays.asList(array)), expectedSize, array.length);
}
public static void assertSize(int expectedSize, final Collection<?> c) {
assertEquals(toString(c), expectedSize, c.size());
}
protected <T extends Disposable> T disposeOnTearDown(final T disposable) {
Disposer.register(myTestRootDisposable, disposable);
return disposable;
}
public static void assertSameLines(String expected, String actual) {
String expectedText = StringUtil.convertLineSeparators(expected.trim());
String actualText = StringUtil.convertLineSeparators(actual.trim());
Assert.assertEquals(expectedText, actualText);
}
public static void assertExists(File file){
assertTrue("File should exist " + file, file.exists());
}
public static void assertDoesntExist(File file){
assertFalse("File should not exist " + file, file.exists());
}
protected String getTestName(boolean lowercaseFirstLetter) {
String name = getName();
return getTestName(name, lowercaseFirstLetter);
}
public static String getTestName(String name, boolean lowercaseFirstLetter) {
if (name == null) {
return "";
}
name = StringUtil.trimStart(name, "test");
if (StringUtil.isEmpty(name)) {
return "";
}
return lowercaseFirstLetter(name, lowercaseFirstLetter);
}
public static String lowercaseFirstLetter(String name, boolean lowercaseFirstLetter) {
if (lowercaseFirstLetter && !isAllUppercaseName(name)) {
name = Character.toLowerCase(name.charAt(0)) + name.substring(1);
}
return name;
}
public static boolean isAllUppercaseName(String name) {
int uppercaseChars = 0;
for (int i = 0; i < name.length(); i++) {
if (Character.isLowerCase(name.charAt(i))) {
return false;
}
if (Character.isUpperCase(name.charAt(i))) {
uppercaseChars++;
}
}
return uppercaseChars >= 3;
}
protected String getTestDirectoryName() {
final String testName = getTestName(true);
return testName.replaceAll("_.*", "");
}
public static void assertSameLinesWithFile(String filePath, String actualText) {
assertSameLinesWithFile(filePath, actualText, true);
}
public static void assertSameLinesWithFile(String filePath, String actualText, boolean trimBeforeComparing) {
String fileText;
try {
if (OVERWRITE_TESTDATA) {
VfsTestUtil.overwriteTestData(filePath, actualText);
System.out.println("File " + filePath + " created.");
}
fileText = FileUtil.loadFile(new File(filePath), CharsetToolkit.UTF8_CHARSET);
}
catch (FileNotFoundException e) {
VfsTestUtil.overwriteTestData(filePath, actualText);
throw new AssertionFailedError("No output text found. File " + filePath + " created.");
}
catch (IOException e) {
throw new RuntimeException(e);
}
String expected = StringUtil.convertLineSeparators(trimBeforeComparing ? fileText.trim() : fileText);
String actual = StringUtil.convertLineSeparators(trimBeforeComparing ? actualText.trim() : actualText);
if (!Comparing.equal(expected, actual)) {
throw new FileComparisonFailure(null, expected, actual, filePath);
}
}
public static void clearFields(final Object test) throws IllegalAccessException {
Class aClass = test.getClass();
while (aClass != null) {
clearDeclaredFields(test, aClass);
aClass = aClass.getSuperclass();
}
}
public static void clearDeclaredFields(Object test, Class aClass) throws IllegalAccessException {
if (aClass == null) return;
for (final Field field : aClass.getDeclaredFields()) {
@NonNls final String name = field.getDeclaringClass().getName();
if (!name.startsWith("junit.framework.") && !name.startsWith("com.intellij.testFramework.")) {
final int modifiers = field.getModifiers();
if ((modifiers & Modifier.FINAL) == 0 && (modifiers & Modifier.STATIC) == 0 && !field.getType().isPrimitive()) {
field.setAccessible(true);
field.set(test, null);
}
}
}
}
@SuppressWarnings("deprecation")
protected static void checkSettingsEqual(CodeStyleSettings expected, CodeStyleSettings settings, String message) throws Exception {
if (expected == null || settings == null) return;
Element oldS = new Element("temp");
expected.writeExternal(oldS);
Element newS = new Element("temp");
settings.writeExternal(newS);
String newString = JDOMUtil.writeElement(newS, "\n");
String oldString = JDOMUtil.writeElement(oldS, "\n");
Assert.assertEquals(message, oldString, newString);
}
public boolean isPerformanceTest() {
String name = getName();
return name != null && name.contains("Performance") || getClass().getName().contains("Performance");
}
public static void doPostponedFormatting(final Project project) {
DocumentUtil.writeInRunUndoTransparentAction(new Runnable() {
@Override
public void run() {
PsiDocumentManager.getInstance(project).commitAllDocuments();
PostprocessReformattingAspect.getInstance(project).doPostponedFormatting();
}
});
}
protected static void checkAllTimersAreDisposed() {
Field firstTimerF;
Object timerQueue;
Object timer;
try {
Class<?> TimerQueueC = Class.forName("javax.swing.TimerQueue");
Method sharedInstance = TimerQueueC.getDeclaredMethod("sharedInstance");
sharedInstance.setAccessible(true);
firstTimerF = ReflectionUtil.getDeclaredField(TimerQueueC, "firstTimer");
timerQueue = sharedInstance.invoke(null);
if (firstTimerF == null) {
// jdk 8
DelayQueue delayQueue = ReflectionUtil.getField(TimerQueueC, timerQueue, DelayQueue.class, "queue");
timer = delayQueue.peek();
}
else {
// ancient jdk
firstTimerF.setAccessible(true);
timer = firstTimerF.get(timerQueue);
}
}
catch (Throwable e) {
throw new RuntimeException(e);
}
if (timer != null) {
if (firstTimerF != null) {
ReflectionUtil.resetField(timerQueue, firstTimerF);
}
String text = "";
if (timer instanceof Delayed) {
long delay = ((Delayed)timer).getDelay(TimeUnit.MILLISECONDS);
text = "(delayed for "+delay+"ms)";
Method getTimer = ReflectionUtil.getDeclaredMethod(timer.getClass(), "getTimer");
getTimer.setAccessible(true);
try {
timer = getTimer.invoke(timer);
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
Timer t = (Timer)timer;
text = "Timer (listeners: "+Arrays.asList(t.getActionListeners()) + ") "+text;
fail("Not disposed Timer: " + text + "; queue:" + timerQueue);
}
}
/**
* Checks that code block throw corresponding exception.
*
* @param exceptionCase Block annotated with some exception type
* @throws Throwable
*/
protected void assertException(final AbstractExceptionCase exceptionCase) throws Throwable {
assertException(exceptionCase, null);
}
/**
* Checks that code block throw corresponding exception with expected error msg.
* If expected error message is null it will not be checked.
*
* @param exceptionCase Block annotated with some exception type
* @param expectedErrorMsg expected error messge
* @throws Throwable
*/
protected void assertException(final AbstractExceptionCase exceptionCase,
@Nullable final String expectedErrorMsg) throws Throwable {
assertExceptionOccurred(true, exceptionCase, expectedErrorMsg);
}
/**
* Checks that code block doesn't throw corresponding exception.
*
* @param exceptionCase Block annotated with some exception type
* @throws Throwable
*/
protected void assertNoException(final AbstractExceptionCase exceptionCase) throws Throwable {
assertExceptionOccurred(false, exceptionCase, null);
}
protected void assertNoThrowable(final Runnable closure) {
String throwableName = null;
try {
closure.run();
}
catch (Throwable thr) {
throwableName = thr.getClass().getName();
}
assertNull(throwableName);
}
  /**
   * Shared implementation for assertException/assertNoException: runs the closure
   * and checks whether the expected exception type was (or was not) thrown.
   *
   * @param shouldOccur      true when the exception is expected, false when forbidden
   * @param exceptionCase    the closure plus its expected exception class
   * @param expectedErrorMsg expected exception message; null disables the message check
   */
  private static void assertExceptionOccurred(boolean shouldOccur,
                                              AbstractExceptionCase exceptionCase,
                                              String expectedErrorMsg) throws Throwable {
    boolean wasThrown = false;
    try {
      exceptionCase.tryClosure();
    }
    catch (Throwable e) {
      if (shouldOccur) {
        // Expected path: verify the exact exception class and optionally its message.
        wasThrown = true;
        final String errorMessage = exceptionCase.getAssertionErrorMessage();
        assertEquals(errorMessage, exceptionCase.getExpectedExceptionClass(), e.getClass());
        if (expectedErrorMsg != null) {
          assertEquals("Compare error messages", expectedErrorMsg, e.getMessage());
        }
      }
      else if (exceptionCase.getExpectedExceptionClass().equals(e.getClass())) {
        // The forbidden exception type was thrown: dump it and fail.
        wasThrown = true;
        System.out.println("");
        e.printStackTrace(System.out);
        fail("Exception isn't expected here. Exception message: " + e.getMessage());
      }
      else {
        // Unrelated exception: propagate untouched.
        throw e;
      }
    }
    finally {
      // An expected exception that never materialized is also a failure.
      if (shouldOccur && !wasThrown) {
        fail(exceptionCase.getAssertionErrorMessage());
      }
    }
  }
protected boolean annotatedWith(@NotNull Class annotationClass) {
Class<?> aClass = getClass();
String methodName = "test" + getTestName(false);
boolean methodChecked = false;
while (aClass != null && aClass != Object.class) {
if (aClass.getAnnotation(annotationClass) != null) return true;
if (!methodChecked) {
Method method = ReflectionUtil.getDeclaredMethod(aClass, methodName);
if (method != null) {
if (method.getAnnotation(annotationClass) != null) return true;
methodChecked = true;
}
}
aClass = aClass.getSuperclass();
}
return false;
}
protected String getHomePath() {
return PathManager.getHomePath().replace(File.separatorChar, '/');
}
protected static boolean isInHeadlessEnvironment() {
return GraphicsEnvironment.isHeadless();
}
public static void refreshRecursively(@NotNull VirtualFile file) {
VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor() {
@Override
public boolean visitFile(@NotNull VirtualFile file) {
file.getChildren();
return true;
}
});
file.refresh(false, true);
}
@NotNull
public static Test filteredSuite(@RegExp String regexp, @NotNull Test test) {
final Pattern pattern = Pattern.compile(regexp);
final TestSuite testSuite = new TestSuite();
new Processor<Test>() {
@Override
public boolean process(Test test) {
if (test instanceof TestSuite) {
for (int i = 0, len = ((TestSuite)test).testCount(); i < len; i++) {
process(((TestSuite)test).testAt(i));
}
}
else if (pattern.matcher(test.toString()).find()) {
testSuite.addTest(test);
}
return false;
}
}.process(test);
return testSuite;
}
@Nullable
public static VirtualFile refreshAndFindFile(@NotNull final File file) {
return UIUtil.invokeAndWaitIfNeeded(new Computable<VirtualFile>() {
@Override
public VirtualFile compute() {
return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
}
});
}
public static <E extends Exception> void invokeAndWaitIfNeeded(@NotNull final ThrowableRunnable<E> runnable) throws Exception {
if (SwingUtilities.isEventDispatchThread()) {
runnable.run();
}
else {
final Ref<Exception> ref = Ref.create();
SwingUtilities.invokeAndWait(new Runnable() {
@Override
public void run() {
try {
runnable.run();
}
catch (Exception e) {
ref.set(e);
}
}
});
if (!ref.isNull()) throw ref.get();
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.shard;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Sort;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FilterDirectory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.engine.EngineDiskUtils;
import org.elasticsearch.index.engine.EngineException;
import org.elasticsearch.index.engine.InternalEngine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.snapshots.IndexShardRestoreFailedException;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.repositories.Repository;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.stream.Collectors;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
/**
* This package private utility class encapsulates the logic to recover an index shard from either an existing index on
* disk or from a snapshot in a repository.
*/
final class StoreRecovery {
// Logger and target shard are fixed for the lifetime of this helper.
private final Logger logger;
private final ShardId shardId;

/**
 * Creates a recovery helper bound to a single shard.
 *
 * @param shardId the shard this instance will recover
 * @param logger  logger used to report recovery progress
 */
StoreRecovery(ShardId shardId, Logger logger) {
    this.shardId = shardId;
    this.logger = logger;
}
/**
* Recovers a shard from it's local file system store. This method required pre-knowledge about if the shard should
* exist on disk ie. has been previously allocated or if the shard is a brand new allocation without pre-existing index
* files / transaction logs. This
* @param indexShard the index shard instance to recovery the shard into
* @return <code>true</code> if the shard has been recovered successfully, <code>false</code> if the recovery
* has been ignored due to a concurrent modification of if the clusters state has changed due to async updates.
* @see Store
*/
boolean recoverFromStore(final IndexShard indexShard) {
    // Skip shards that were closed concurrently or are not recoverable primaries.
    if (canRecover(indexShard) == false) {
        return false;
    }
    final RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType();
    assert recoveryType == RecoverySource.Type.EMPTY_STORE || recoveryType == RecoverySource.Type.EXISTING_STORE :
        "expected store recovery type but was: " + recoveryType;
    // The heavy lifting (and failure translation) happens in executeRecovery.
    return executeRecovery(indexShard, () -> {
        logger.debug("starting recovery from store ...");
        internalRecoverFromStore(indexShard);
    });
}
/**
 * Recovers a shard by adding the Lucene segments of one or more local source shards
 * (shrink/split of an index) into this shard's store and then recovering the engine from it.
 *
 * @param mappingUpdateConsumer callback invoked for every mapping of the source index before the local merge
 * @param indexShard            the target shard to recover into
 * @param shards                snapshots of the source shards; must be non-empty and all from a single index
 * @return {@code true} if recovery ran, {@code false} if the shard could not be recovered (e.g. closed)
 * @throws IOException if reading the source shards fails
 */
boolean recoverFromLocalShards(BiConsumer<String, MappingMetaData> mappingUpdateConsumer, final IndexShard indexShard, final List<LocalShardSnapshot> shards) throws IOException {
    if (canRecover(indexShard)) {
        RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType();
        assert recoveryType == RecoverySource.Type.LOCAL_SHARDS: "expected local shards recovery type: " + recoveryType;
        if (shards.isEmpty()) {
            throw new IllegalArgumentException("shards must not be empty");
        }
        Set<Index> indices = shards.stream().map((s) -> s.getIndex()).collect(Collectors.toSet());
        if (indices.size() > 1) {
            throw new IllegalArgumentException("can't add shards from more than one index");
        }
        IndexMetaData sourceMetaData = shards.get(0).getIndexMetaData();
        // propagate all source mappings to the target before merging segments locally
        for (ObjectObjectCursor<String, MappingMetaData> mapping : sourceMetaData.getMappings()) {
            mappingUpdateConsumer.accept(mapping.key, mapping.value);
        }
        indexShard.mapperService().merge(sourceMetaData, MapperService.MergeReason.MAPPING_RECOVERY);
        // now that the mapping is merged we can validate the index sort configuration.
        Sort indexSort = indexShard.getIndexSort();
        final boolean hasNested = indexShard.mapperService().hasNested();
        // fewer source shards than target shards means this is a split rather than a shrink
        final boolean isSplit = sourceMetaData.getNumberOfShards() < indexShard.indexSettings().getNumberOfShards();
        assert isSplit == false || sourceMetaData.getCreationVersion().onOrAfter(Version.V_6_0_0_alpha1) : "for split we require a " +
            "single type but the index is created before 6.0.0";
        return executeRecovery(indexShard, () -> {
            logger.debug("starting recovery from local shards {}", shards);
            try {
                final Directory directory = indexShard.store().directory(); // don't close this directory!!
                final Directory[] sources = shards.stream().map(LocalShardSnapshot::getSnapshotDirectory).toArray(Directory[]::new);
                // seed the target with the highest seq# / auto-id timestamp seen by any source shard
                final long maxSeqNo = shards.stream().mapToLong(LocalShardSnapshot::maxSeqNo).max().getAsLong();
                final long maxUnsafeAutoIdTimestamp =
                    shards.stream().mapToLong(LocalShardSnapshot::maxUnsafeAutoIdTimestamp).max().getAsLong();
                addIndices(indexShard.recoveryState().getIndex(), directory, indexSort, sources, maxSeqNo, maxUnsafeAutoIdTimestamp,
                    indexShard.indexSettings().getIndexMetaData(), indexShard.shardId().id(), isSplit, hasNested);
                internalRecoverFromStore(indexShard);
                // just trigger a merge to do housekeeping on the
                // copied segments - we will also see them in stats etc.
                indexShard.getEngine().forceMerge(false, -1, false, false, false);
            } catch (IOException ex) {
                throw new IndexShardRecoveryException(indexShard.shardId(), "failed to recover from local shards", ex);
            }
        });
    }
    return false;
}
/**
 * Adds the segments of {@code sources} to the {@code target} directory (hard-linking files where the
 * filesystem supports it, copying otherwise) and commits with sequence-number metadata seeded from the
 * sources.
 *
 * @param indexRecoveryStats stats object updated as files are linked or copied
 * @param target             destination directory of the shard being recovered; not closed here
 * @param indexSort          index sort to configure on the writer, or {@code null} when unsorted
 * @param sources            snapshot directories of the source shards; must not be empty
 * @param maxSeqNo           max sequence number across sources; written as both max seq# and local checkpoint
 * @param maxUnsafeAutoIdTimestamp max unsafe auto-generated-id timestamp across sources
 * @param indexMetaData      metadata of the target index, used by the split query
 * @param shardId            target shard id, used by the split query
 * @param split              when true, documents not belonging to {@code shardId} are deleted
 * @param hasNested          whether mappings contain nested docs (affects the split query)
 */
void addIndices(final RecoveryState.Index indexRecoveryStats, final Directory target, final Sort indexSort, final Directory[] sources,
                final long maxSeqNo, final long maxUnsafeAutoIdTimestamp, IndexMetaData indexMetaData, int shardId, boolean split,
                boolean hasNested) throws IOException {
    // clean target directory (if previous recovery attempt failed) and create a fresh segment file with the proper lucene version
    Lucene.cleanLuceneIndex(target);
    assert sources.length > 0;
    final int luceneIndexCreatedVersionMajor = Lucene.readSegmentInfos(sources[0]).getIndexCreatedVersionMajor();
    new SegmentInfos(luceneIndexCreatedVersionMajor).commit(target);
    // prefer hard links over byte copies when the underlying filesystem allows it
    final Directory hardLinkOrCopyTarget = new org.apache.lucene.store.HardlinkCopyDirectoryWrapper(target);
    IndexWriterConfig iwc = new IndexWriterConfig(null)
        .setCommitOnClose(false)
        // we don't want merges to happen here - we call maybe merge on the engine
        // later once we stared it up otherwise we would need to wait for it here
        // we also don't specify a codec here and merges should use the engines for this index
        .setMergePolicy(NoMergePolicy.INSTANCE)
        .setOpenMode(IndexWriterConfig.OpenMode.APPEND);
    if (indexSort != null) {
        iwc.setIndexSort(indexSort);
    }
    try (IndexWriter writer = new IndexWriter(new StatsDirectoryWrapper(hardLinkOrCopyTarget, indexRecoveryStats), iwc)) {
        writer.addIndexes(sources);
        if (split) {
            // drop documents that route to a different target shard than this one
            writer.deleteDocuments(new ShardSplittingQuery(indexMetaData, shardId, hasNested));
        }
        /*
         * We set the maximum sequence number and the local checkpoint on the target to the maximum of the maximum sequence numbers on
         * the source shards. This ensures that history after this maximum sequence number can advance and we have correct
         * document-level semantics.
         */
        writer.setLiveCommitData(() -> {
            final HashMap<String, String> liveCommitData = new HashMap<>(3);
            liveCommitData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(maxSeqNo));
            liveCommitData.put(SequenceNumbers.LOCAL_CHECKPOINT_KEY, Long.toString(maxSeqNo));
            liveCommitData.put(InternalEngine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp));
            return liveCommitData.entrySet().iterator();
        });
        writer.commit();
    }
}
/**
* Directory wrapper that records copy process for recovery statistics
*/
static final class StatsDirectoryWrapper extends FilterDirectory {
    // Recovery stats updated as files are hard-linked or copied into the wrapped directory.
    private final RecoveryState.Index index;

    StatsDirectoryWrapper(Directory in, RecoveryState.Index indexRecoveryStats) {
        super(in);
        this.index = indexRecoveryStats;
    }

    /**
     * Copies {@code src} from {@code from} into this directory as {@code dest}, reporting byte-level
     * progress to the recovery stats. When the delegate hard-links instead of copying, {@code openInput}
     * below is never invoked and the file is recorded as reused instead.
     */
    @Override
    public void copyFrom(Directory from, String src, String dest, IOContext context) throws IOException {
        final long l = from.fileLength(src);
        // flips to true as soon as an actual byte copy starts (i.e. hard-linking was not possible)
        final AtomicBoolean copies = new AtomicBoolean(false);
        // here we wrap the index input form the source directory to report progress of file copy for the recovery stats.
        // we increment the num bytes recovered in the readBytes method below, if users pull statistics they can see immediately
        // how much has been recovered.
        in.copyFrom(new FilterDirectory(from) {
            @Override
            public IndexInput openInput(String name, IOContext context) throws IOException {
                index.addFileDetail(dest, l, false);
                copies.set(true);
                final IndexInput input = in.openInput(name, context);
                return new IndexInput("StatsDirectoryWrapper(" + input.toString() + ")") {
                    @Override
                    public void close() throws IOException {
                        input.close();
                    }

                    @Override
                    public long getFilePointer() {
                        throw new UnsupportedOperationException("only straight copies are supported");
                    }

                    @Override
                    public void seek(long pos) throws IOException {
                        throw new UnsupportedOperationException("seeks are not supported");
                    }

                    @Override
                    public long length() {
                        return input.length();
                    }

                    @Override
                    public IndexInput slice(String sliceDescription, long offset, long length) throws IOException {
                        throw new UnsupportedOperationException("slices are not supported");
                    }

                    @Override
                    public byte readByte() throws IOException {
                        throw new UnsupportedOperationException("use a buffer if you wanna perform well");
                    }

                    @Override
                    public void readBytes(byte[] b, int offset, int len) throws IOException {
                        // we rely on the fact that copyFrom uses a buffer
                        input.readBytes(b, offset, len);
                        index.addRecoveredBytesToFile(dest, len);
                    }
                };
            }
        }, src, dest, context);
        if (copies.get() == false) {
            index.addFileDetail(dest, l, true); // hardlinked - we treat it as reused since the file was already somewhat there
        } else {
            assert index.getFileDetails(dest) != null : "File [" + dest + "] has no file details";
            assert index.getFileDetails(dest).recovered() == l : index.getFileDetails(dest).toString();
        }
    }
}
/**
* Recovers an index from a given {@link Repository}. This method restores a
* previously created index snapshot into an existing initializing shard.
* @param indexShard the index shard instance to recovery the snapshot from
* @param repository the repository holding the physical files the shard should be recovered from
* @return <code>true</code> if the shard has been recovered successfully, <code>false</code> if the recovery
* has been ignored due to a concurrent modification of if the clusters state has changed due to async updates.
*/
boolean recoverFromRepository(final IndexShard indexShard, Repository repository) {
    // Skip shards that were closed concurrently or are not recoverable primaries.
    if (canRecover(indexShard) == false) {
        return false;
    }
    final RecoverySource.Type recoveryType = indexShard.recoveryState().getRecoverySource().getType();
    assert recoveryType == RecoverySource.Type.SNAPSHOT : "expected snapshot recovery type: " + recoveryType;
    final SnapshotRecoverySource recoverySource = (SnapshotRecoverySource) indexShard.recoveryState().getRecoverySource();
    return executeRecovery(indexShard, () -> {
        logger.debug("restoring from {} ...", indexShard.recoveryState().getRecoverySource());
        restore(indexShard, repository, recoverySource);
    });
}
/**
 * Returns whether recovery may proceed on this shard. A concurrently closed shard is silently
 * skipped; attempting to recover a non-primary is a hard error.
 */
private boolean canRecover(IndexShard indexShard) {
    final boolean closed = indexShard.state() == IndexShardState.CLOSED;
    if (closed) {
        // got closed on us, just ignore this recovery
        return false;
    }
    final boolean isPrimary = indexShard.routingEntry().primary();
    if (!isPrimary) {
        throw new IndexShardRecoveryException(shardId, "Trying to recover when the shard is in backup state", null);
    }
    return true;
}
/**
* Recovers the state of the shard from the store.
*/
/**
 * Runs {@code recoveryRunnable} and translates its outcome: {@code true} on success, {@code false}
 * when the shard was closed or not started concurrently (such recoveries are silently ignored),
 * and an {@link IndexShardRecoveryException} for every other failure.
 */
private boolean executeRecovery(final IndexShard indexShard, Runnable recoveryRunnable) throws IndexShardRecoveryException {
    try {
        recoveryRunnable.run();
        // Check that the gateway didn't leave the shard in init or recovering stage. it is up to the gateway
        // to call post recovery.
        final IndexShardState shardState = indexShard.state();
        final RecoveryState recoveryState = indexShard.recoveryState();
        assert shardState != IndexShardState.CREATED && shardState != IndexShardState.RECOVERING : "recovery process of " + shardId + " didn't get to post_recovery. shardState [" + shardState + "]";
        if (logger.isTraceEnabled()) {
            // build a multi-line summary of the index / verify / translog phases for trace logging
            RecoveryState.Index index = recoveryState.getIndex();
            StringBuilder sb = new StringBuilder();
            sb.append(" index : files [").append(index.totalFileCount()).append("] with total_size [")
                .append(new ByteSizeValue(index.totalBytes())).append("], took[")
                .append(TimeValue.timeValueMillis(index.time())).append("]\n");
            sb.append(" : recovered_files [").append(index.recoveredFileCount()).append("] with total_size [")
                .append(new ByteSizeValue(index.recoveredBytes())).append("]\n");
            sb.append(" : reusing_files [").append(index.reusedFileCount()).append("] with total_size [")
                .append(new ByteSizeValue(index.reusedBytes())).append("]\n");
            sb.append(" verify_index : took [").append(TimeValue.timeValueMillis(recoveryState.getVerifyIndex().time())).append("], check_index [")
                .append(timeValueMillis(recoveryState.getVerifyIndex().checkIndexTime())).append("]\n");
            sb.append(" translog : number_of_operations [").append(recoveryState.getTranslog().recoveredOperations())
                .append("], took [").append(TimeValue.timeValueMillis(recoveryState.getTranslog().time())).append("]");
            logger.trace("recovery completed from [shard_store], took [{}]\n{}", timeValueMillis(recoveryState.getTimer().time()), sb);
        } else if (logger.isDebugEnabled()) {
            logger.debug("recovery completed from [shard_store], took [{}]", timeValueMillis(recoveryState.getTimer().time()));
        }
        return true;
    } catch (IndexShardRecoveryException e) {
        if (indexShard.state() == IndexShardState.CLOSED) {
            // got closed on us, just ignore this recovery
            return false;
        }
        if ((e.getCause() instanceof IndexShardClosedException) || (e.getCause() instanceof IndexShardNotStartedException)) {
            // got closed on us, just ignore this recovery
            return false;
        }
        throw e;
    } catch (IndexShardClosedException | IndexShardNotStartedException e) {
        // intentionally swallowed: shard was closed / never started while recovering; fall through to "ignored"
    } catch (Exception e) {
        if (indexShard.state() == IndexShardState.CLOSED) {
            // got closed on us, just ignore this recovery
            return false;
        }
        throw new IndexShardRecoveryException(shardId, "failed recovery", e);
    }
    return false;
}
/**
* Recovers the state of the shard from the store.
*/
/**
 * Recovers the state of the shard from the store: reads the last Lucene commit (when the shard is
 * expected to exist on disk), cleans leftovers for brand-new shards, bootstraps translog/history as
 * needed, then opens the engine and replays the translog.
 */
private void internalRecoverFromStore(IndexShard indexShard) throws IndexShardRecoveryException {
    final RecoveryState recoveryState = indexShard.recoveryState();
    // EMPTY_STORE means a brand-new allocation: no index files are expected on disk
    final boolean indexShouldExists = recoveryState.getRecoverySource().getType() != RecoverySource.Type.EMPTY_STORE;
    indexShard.prepareForIndexRecovery();
    long version = -1;
    SegmentInfos si = null;
    final Store store = indexShard.store();
    store.incRef();
    try {
        try {
            store.failIfCorrupted();
            try {
                si = store.readLastCommittedSegmentsInfo();
            } catch (Exception e) {
                // include a directory listing in the failure message to ease debugging of missing-file cases
                String files = "_unknown_";
                try {
                    files = Arrays.toString(store.directory().listAll());
                } catch (Exception inner) {
                    inner.addSuppressed(e);
                    files += " (failure=" + ExceptionsHelper.detailedMessage(inner) + ")";
                }
                if (indexShouldExists) {
                    throw new IndexShardRecoveryException(shardId, "shard allocated for local recovery (post api), should exist, but doesn't, current files: " + files, e);
                }
            }
            if (si != null) {
                if (indexShouldExists) {
                    version = si.getVersion();
                } else {
                    // it exists on the directory, but shouldn't exist on the FS, its a leftover (possibly dangling)
                    // its a "new index create" API, we have to do something, so better to clean it than use same data
                    logger.trace("cleaning existing shard, shouldn't exists");
                    Lucene.cleanLuceneIndex(store.directory());
                    si = null;
                }
            }
        } catch (Exception e) {
            throw new IndexShardRecoveryException(shardId, "failed to fetch index version after copying it over", e);
        }
        recoveryState.getIndex().updateVersion(version);
        if (recoveryState.getRecoverySource().getType() == RecoverySource.Type.LOCAL_SHARDS) {
            assert indexShouldExists;
            // local-shards recovery just wrote a fresh Lucene index; seed a new translog/history from it
            EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(store.directory(), indexShard.shardPath().resolveTranslog(), shardId);
        } else if (indexShouldExists) {
            // since we recover from local, just fill the files and size
            try {
                final RecoveryState.Index index = recoveryState.getIndex();
                if (si != null) {
                    addRecoveredFileDetails(si, store, index);
                }
            } catch (IOException e) {
                logger.debug("failed to list file details", e);
            }
        } else {
            // brand-new shard: create an empty index and translog
            EngineDiskUtils.createEmpty(store.directory(), indexShard.shardPath().resolveTranslog(), shardId);
        }
        indexShard.openEngineAndRecoverFromTranslog();
        indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm());
        indexShard.finalizeRecovery();
        indexShard.postRecovery("post recovery from shard_store");
    } catch (EngineException | IOException e) {
        throw new IndexShardRecoveryException(shardId, "failed to recover from gateway", e);
    } finally {
        store.decRef();
    }
}
/**
 * Records every file of the given commit in the recovery stats, marking each as reused since the
 * file already exists locally.
 */
private void addRecoveredFileDetails(SegmentInfos si, Store store, RecoveryState.Index index) throws IOException {
    final Directory dir = store.directory();
    for (final String fileName : Lucene.files(si)) {
        index.addFileDetail(fileName, dir.fileLength(fileName), true);
    }
}
/**
* Restores shard from {@link SnapshotRecoverySource} associated with this shard in routing table
*/
/**
 * Restores this shard's content from the given {@link SnapshotRecoverySource} in {@code repository}
 * and brings the engine online. Any failure is wrapped in {@link IndexShardRestoreFailedException}.
 */
private void restore(final IndexShard indexShard, final Repository repository, final SnapshotRecoverySource restoreSource) {
    final RecoveryState.Translog translogState = indexShard.recoveryState().getTranslog();
    if (restoreSource == null) {
        throw new IndexShardRestoreFailedException(shardId, "empty restore source");
    }
    if (logger.isTraceEnabled()) {
        logger.trace("[{}] restoring shard [{}]", restoreSource.snapshot(), shardId);
    }
    try {
        // a snapshot restore replays no translog operations; the engine is bootstrapped from the restored index below
        translogState.totalOperations(0);
        translogState.totalOperationsOnStart(0);
        indexShard.prepareForIndexRecovery();
        ShardId snapshotShardId = shardId;
        final String indexName = restoreSource.index();
        if (!shardId.getIndexName().equals(indexName)) {
            // the index was renamed on restore; address the snapshot shard by its original index name
            snapshotShardId = new ShardId(indexName, IndexMetaData.INDEX_UUID_NA_VALUE, shardId.id());
        }
        final IndexId indexId = repository.getRepositoryData().resolveIndexId(indexName);
        repository.restoreShard(indexShard, restoreSource.snapshot().getSnapshotId(), restoreSource.version(), indexId, snapshotShardId, indexShard.recoveryState());
        // restored files carry no translog; create a fresh translog/history seeded from the restored Lucene commit
        EngineDiskUtils.bootstrapNewHistoryFromLuceneIndex(indexShard.store().directory(), indexShard.shardPath().resolveTranslog(),
            shardId);
        assert indexShard.shardRouting.primary() : "only primary shards can recover from store";
        indexShard.openEngineAndRecoverFromTranslog();
        indexShard.getEngine().fillSeqNoGaps(indexShard.getPrimaryTerm());
        indexShard.finalizeRecovery();
        indexShard.postRecovery("restore done");
    } catch (Exception e) {
        throw new IndexShardRestoreFailedException(shardId, "restore failed", e);
    }
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.Lists;
import com.google.common.primitives.Ints;
import io.druid.collections.bitmap.BitmapFactory;
import io.druid.collections.bitmap.MutableBitmap;
import io.druid.common.config.NullHandling;
import io.druid.data.input.impl.DimensionSchema.MultiValueHandling;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.guava.Comparators;
import io.druid.query.dimension.DefaultDimensionSpec;
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.extraction.ExtractionFn;
import io.druid.query.filter.ValueMatcher;
import io.druid.query.monomorphicprocessing.RuntimeShapeInspector;
import io.druid.segment.data.ArrayBasedIndexedInts;
import io.druid.segment.data.Indexed;
import io.druid.segment.data.IndexedInts;
import io.druid.segment.data.IndexedIterable;
import io.druid.segment.filter.BooleanValueMatcher;
import io.druid.segment.incremental.IncrementalIndex;
import io.druid.segment.incremental.TimeAndDimsHolder;
import it.unimi.dsi.fastutil.ints.IntArrays;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import it.unimi.dsi.fastutil.objects.Object2IntRBTreeMap;
import it.unimi.dsi.fastutil.objects.Object2IntSortedMap;
import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.BitSet;
import java.util.Iterator;
import java.util.List;
public class StringDimensionIndexer implements DimensionIndexer<Integer, int[], String>
{
/**
 * Coerces an arbitrary input object to its String form, additionally mapping empty strings to null
 * when {@link NullHandling#emptyToNullIfNeeded} is configured to do so.
 */
private static String emptytoNullIfNeeded(Object o)
{
    if (o == null) {
        return null;
    }
    return NullHandling.emptyToNullIfNeeded(o.toString());
}

// Sentinel id meaning "value not present in the dictionary".
private static final int ABSENT_VALUE_ID = -1;
/**
 * Thread-safe, append-only mapping between dimension String values and the int ids used to encode
 * rows. The null value is tracked separately via {@code idForNull} because the backing map does not
 * hold a null key; {@code idToValue} contains every value, null included.
 */
private static class DimensionDictionary
{
    // Smallest / largest non-null value added so far, by natural String order.
    private String minValue = null;
    private String maxValue = null;
    // Id assigned to the null value, or ABSENT_VALUE_ID while null has not been added.
    private int idForNull = ABSENT_VALUE_ID;

    private final Object2IntMap<String> valueToId = new Object2IntOpenHashMap<>();
    private final List<String> idToValue = Lists.newArrayList();
    // Guards every field above.
    private final Object lock;

    public DimensionDictionary()
    {
        this.lock = new Object();
        // getInt() then returns -1 (== ABSENT_VALUE_ID) for unknown values instead of throwing
        valueToId.defaultReturnValue(-1);
    }

    /** Returns the id for {@code value}, or -1 when it has not been added yet. */
    public int getId(String value)
    {
        synchronized (lock) {
            if (value == null) {
                return idForNull;
            }
            return valueToId.getInt(value);
        }
    }

    /** Returns the value for {@code id}; the null value's id decodes back to null. */
    public String getValue(int id)
    {
        synchronized (lock) {
            if (id == idForNull) {
                return null;
            }
            return idToValue.get(id);
        }
    }

    public int size()
    {
        synchronized (lock) {
            // using idToValue rather than valueToId because the valueToId doesn't account null value, if it is present.
            return idToValue.size();
        }
    }

    /**
     * Adds {@code originalValue} if absent and returns its id (existing or newly assigned).
     * Ids are assigned densely in arrival order; min/max are maintained for non-null values.
     */
    public int add(String originalValue)
    {
        synchronized (lock) {
            if (originalValue == null) {
                if (idForNull == ABSENT_VALUE_ID) {
                    idForNull = size();
                    idToValue.add(null);
                }
                return idForNull;
            }
            int prev = valueToId.getInt(originalValue);
            if (prev >= 0) {
                return prev;
            }
            final int index = size();
            valueToId.put(originalValue, index);
            idToValue.add(originalValue);
            minValue = minValue == null || minValue.compareTo(originalValue) > 0 ? originalValue : minValue;
            maxValue = maxValue == null || maxValue.compareTo(originalValue) < 0 ? originalValue : maxValue;
            return index;
        }
    }

    public String getMinValue()
    {
        synchronized (lock) {
            return minValue;
        }
    }

    public String getMaxValue()
    {
        synchronized (lock) {
            return maxValue;
        }
    }

    /** Builds a sorted view of the current dictionary contents. */
    public SortedDimensionDictionary sort()
    {
        synchronized (lock) {
            return new SortedDimensionDictionary(idToValue, size());
        }
    }
}
/**
 * Immutable, sorted view of a {@link DimensionDictionary}: holds the values in natural order
 * (null first) plus both directions of the unsorted-id &lt;-&gt; sorted-position mapping.
 */
private static class SortedDimensionDictionary
{
    private final List<String> sortedVals;
    private final int[] idToIndex;
    private final int[] indexToId;

    public SortedDimensionDictionary(List<String> idToValue, int length)
    {
        // A tree map orders values naturally (null first) while remembering each value's unsorted id.
        final Object2IntSortedMap<String> sortedMap = new Object2IntRBTreeMap<>(Comparators.naturalNullsFirst());
        for (int unsortedId = 0; unsortedId < length; unsortedId++) {
            sortedMap.put(idToValue.get(unsortedId), unsortedId);
        }
        this.sortedVals = Lists.newArrayList(sortedMap.keySet());
        this.idToIndex = new int[length];
        this.indexToId = new int[length];
        // Walk the map in sorted order, recording the bijection between positions and ids.
        final IntIterator unsortedIds = sortedMap.values().iterator();
        int sortedId = 0;
        while (unsortedIds.hasNext()) {
            final int unsortedId = unsortedIds.nextInt();
            idToIndex[unsortedId] = sortedId;
            indexToId[sortedId] = unsortedId;
            sortedId++;
        }
    }

    public int getUnsortedIdFromSortedId(int index)
    {
        return indexToId[index];
    }

    public int getSortedIdFromUnsortedId(int id)
    {
        return idToIndex[id];
    }

    public String getValueFromSortedId(int index)
    {
        return sortedVals.get(index);
    }
}
private final DimensionDictionary dimLookup;
private final MultiValueHandling multiValueHandling;
private final boolean hasBitmapIndexes;

// Lazily built by sortedLookup(); reset to null whenever the dictionary grows.
private SortedDimensionDictionary sortedLookup;

/**
 * @param multiValueHandling how multi-value rows are normalized; null selects the default handling
 * @param hasBitmapIndexes   whether bitmap indexes may be filled for this dimension
 */
public StringDimensionIndexer(MultiValueHandling multiValueHandling, boolean hasBitmapIndexes)
{
    this.dimLookup = new DimensionDictionary();
    if (multiValueHandling == null) {
        this.multiValueHandling = MultiValueHandling.ofDefault();
    } else {
        this.multiValueHandling = multiValueHandling;
    }
    this.hasBitmapIndexes = hasBitmapIndexes;
}
/**
 * Encodes one row's raw dimension value(s) into unsorted dictionary ids, growing the dictionary as
 * needed. Multi-value rows may be sorted and (for SORTED_SET handling) de-duplicated according to
 * {@code multiValueHandling}. Invalidates the cached sorted lookup when new values were added.
 */
@Override
public int[] processRowValsToUnsortedEncodedKeyComponent(Object dimValues, boolean reportParseExceptions)
{
    final int[] encodedDimensionValues;
    final int oldDictSize = dimLookup.size();
    if (dimValues == null) {
        // reuse the existing null id when present so the dictionary does not grow needlessly
        final int nullId = dimLookup.getId(null);
        encodedDimensionValues = nullId == ABSENT_VALUE_ID ? new int[]{dimLookup.add(null)} : new int[]{nullId};
    } else if (dimValues instanceof List) {
        List<Object> dimValuesList = (List) dimValues;
        if (dimValuesList.isEmpty()) {
            // empty multi-value row: register null in the dictionary but encode as a zero-length row
            dimLookup.add(null);
            encodedDimensionValues = IntArrays.EMPTY_ARRAY;
        } else if (dimValuesList.size() == 1) {
            encodedDimensionValues = new int[]{dimLookup.add(emptytoNullIfNeeded(dimValuesList.get(0)))};
        } else {
            final String[] dimensionValues = new String[dimValuesList.size()];
            for (int i = 0; i < dimValuesList.size(); i++) {
                dimensionValues[i] = emptytoNullIfNeeded(dimValuesList.get(i));
            }
            if (multiValueHandling.needSorting()) {
                // Sort multival row by their unencoded values first.
                Arrays.sort(dimensionValues, Comparators.naturalNullsFirst());
            }
            final int[] retVal = new int[dimensionValues.length];
            int prevId = -1;
            int pos = 0;
            for (String dimensionValue : dimensionValues) {
                if (multiValueHandling != MultiValueHandling.SORTED_SET) {
                    retVal[pos++] = dimLookup.add(dimensionValue);
                    continue;
                }
                // SORTED_SET: values were sorted above, so skipping adjacent duplicate ids de-duplicates the row
                int index = dimLookup.add(dimensionValue);
                if (index != prevId) {
                    prevId = retVal[pos++] = index;
                }
            }
            // shrink the array when SORTED_SET handling dropped duplicates
            encodedDimensionValues = pos == retVal.length ? retVal : Arrays.copyOf(retVal, pos);
        }
    } else {
        encodedDimensionValues = new int[]{dimLookup.add(emptytoNullIfNeeded(dimValues))};
    }
    // If dictionary size has changed, the sorted lookup is no longer valid.
    if (oldDictSize != dimLookup.size()) {
        sortedLookup = null;
    }
    return encodedDimensionValues;
}
/**
 * Converts an unsorted dictionary id to its position in the sorted dictionary.
 *
 * Note: {@code @Override} was missing here while the sibling
 * {@code getUnsortedEncodedValueFromSorted} carries it; added for consistency and so the compiler
 * verifies the signature against the interface.
 */
@Override
public Integer getSortedEncodedValueFromUnsorted(Integer unsortedIntermediateValue)
{
    return sortedLookup().getSortedIdFromUnsortedId(unsortedIntermediateValue);
}
/**
 * Converts a position in the sorted dictionary back to the unsorted id assigned at ingest time.
 */
@Override
public Integer getUnsortedEncodedValueFromSorted(Integer sortedIntermediateValue)
{
    final SortedDimensionDictionary lookup = sortedLookup();
    return lookup.getUnsortedIdFromSortedId(sortedIntermediateValue);
}
/**
 * Returns a read-only view of the dictionary values in sorted order, addressed by sorted id.
 */
@Override
public Indexed<String> getSortedIndexedValues()
{
    return new Indexed<String>()
    {
        @Override
        public Class<? extends String> getClazz()
        {
            return String.class;
        }

        @Override
        public int size()
        {
            return getCardinality();
        }

        @Override
        public String get(int index)
        {
            // "index" is a *sorted* id here
            return getActualValue(index, true);
        }

        @Override
        public int indexOf(String value)
        {
            // translate the value's unsorted id (if any) into its sorted position
            int id = getEncodedValue(value, false);
            return id < 0 ? ABSENT_VALUE_ID : getSortedEncodedValueFromUnsorted(id);
        }

        @Override
        public Iterator<String> iterator()
        {
            return IndexedIterable.create(this).iterator();
        }

        @Override
        public void inspectRuntimeShape(RuntimeShapeInspector inspector)
        {
            // nothing to inspect
        }
    };
}
/** Smallest non-null value added so far, by natural String order (tracked by the dictionary). */
@Override
public String getMinValue()
{
    return dimLookup.getMinValue();
}

/** Largest non-null value added so far, by natural String order (tracked by the dictionary). */
@Override
public String getMaxValue()
{
    return dimLookup.getMaxValue();
}

/** Number of distinct values in the dictionary, including null once it has been assigned an id. */
@Override
public int getCardinality()
{
    return dimLookup.size();
}
/**
 * Compares two encoded multi-value rows: shorter rows sort first; rows of equal length are compared
 * by the decoded String value of each position, with null sorting before any non-null value.
 */
@Override
public int compareUnsortedEncodedKeyComponents(int[] lhs, int[] rhs)
{
    int cmp = Ints.compare(lhs.length, rhs.length);
    // Only rows of equal length reach the element-wise comparison below.
    for (int i = 0; cmp == 0 && i < lhs.length; i++) {
        if (lhs[i] == rhs[i]) {
            // identical ids decode to identical values; keep scanning
            continue;
        }
        final String leftVal = getActualValue(lhs[i], false);
        final String rightVal = getActualValue(rhs[i], false);
        if (leftVal == null && rightVal == null) {
            // two (distinct) null ids still compare equal
            continue;
        }
        if (leftVal == null) {
            cmp = -1;
        } else if (rightVal == null) {
            cmp = 1;
        } else {
            cmp = leftVal.compareTo(rightVal);
        }
    }
    return cmp;
}
/** Two encoded rows are equal iff their unsorted id arrays are identical, element by element. */
@Override
public boolean checkUnsortedEncodedKeyComponentsEqual(int[] lhs, int[] rhs)
{
    return Arrays.equals(lhs, rhs);
}

/** Hash code consistent with {@link #checkUnsortedEncodedKeyComponentsEqual}. */
@Override
public int getUnsortedEncodedKeyComponentHashCode(int[] key)
{
    return Arrays.hashCode(key);
}
/**
 * Creates a {@link DimensionSelector} over the in-memory rows of this dimension, honoring the
 * spec's extraction function. Cardinality is snapshotted at creation time ({@code maxId}).
 */
@Override
public DimensionSelector makeDimensionSelector(
    final DimensionSpec spec,
    final TimeAndDimsHolder currEntry,
    final IncrementalIndex.DimensionDesc desc
)
{
    final ExtractionFn extractionFn = spec.getExtractionFn();
    final int dimIndex = desc.getIndex();
    // snapshot of the cardinality at selector-creation time; lookupName rejects ids >= maxId
    final int maxId = getCardinality();

    class IndexerDimensionSelector implements DimensionSelector, IdLookup
    {
        private final ArrayBasedIndexedInts indexedInts = new ArrayBasedIndexedInts();

        // cached one-element array holding the null value's id, lazily created in getRow()
        private int[] nullIdIntArray;

        @Override
        public IndexedInts getRow()
        {
            final Object[] dims = currEntry.get().getDims();
            int[] indices;
            if (dimIndex < dims.length) {
                indices = (int[]) dims[dimIndex];
            } else {
                indices = null;
            }
            int[] row = null;
            int rowSize = 0;
            // usually due to currEntry's rowIndex is smaller than the row's rowIndex in which this dim first appears
            if (indices == null || indices.length == 0) {
                final int nullId = getEncodedValue(null, false);
                if (nullId > -1) {
                    if (nullIdIntArray == null) {
                        nullIdIntArray = new int[] {nullId};
                    }
                    row = nullIdIntArray;
                    rowSize = 1;
                } else {
                    // doesn't contain nullId, then empty array is used
                    // Choose to use ArrayBasedIndexedInts later, instead of special "empty" IndexedInts, for monomorphism
                    row = IntArrays.EMPTY_ARRAY;
                    rowSize = 0;
                }
            }
            if (row == null && indices != null && indices.length > 0) {
                row = indices;
                rowSize = indices.length;
            }
            indexedInts.setValues(row, rowSize);
            return indexedInts;
        }

        @Override
        public ValueMatcher makeValueMatcher(final String value)
        {
            if (extractionFn == null) {
                final int valueId = lookupId(value);
                // an absent id can still match when the sought value is null (rows missing this dim)
                if (valueId >= 0 || value == null) {
                    return new ValueMatcher()
                    {
                        @Override
                        public boolean matches()
                        {
                            Object[] dims = currEntry.get().getDims();
                            if (dimIndex >= dims.length) {
                                return value == null;
                            }
                            int[] dimsInt = (int[]) dims[dimIndex];
                            if (dimsInt == null || dimsInt.length == 0) {
                                return value == null;
                            }
                            for (int id : dimsInt) {
                                if (id == valueId) {
                                    return true;
                                }
                            }
                            return false;
                        }

                        @Override
                        public void inspectRuntimeShape(RuntimeShapeInspector inspector)
                        {
                            // nothing to inspect
                        }
                    };
                } else {
                    // value is not in the dictionary, so no row can ever match
                    return BooleanValueMatcher.of(false);
                }
            } else {
                // Employ precomputed BitSet optimization
                return makeValueMatcher(Predicates.equalTo(value));
            }
        }

        @Override
        public ValueMatcher makeValueMatcher(final Predicate<String> predicate)
        {
            // evaluate the predicate once per dictionary id up front; matching ids become bits
            final BitSet predicateMatchingValueIds = DimensionSelectorUtils.makePredicateMatchingSet(this, predicate);
            final boolean matchNull = predicate.apply(null);
            return new ValueMatcher()
            {
                @Override
                public boolean matches()
                {
                    Object[] dims = currEntry.get().getDims();
                    if (dimIndex >= dims.length) {
                        return matchNull;
                    }
                    int[] dimsInt = (int[]) dims[dimIndex];
                    if (dimsInt == null || dimsInt.length == 0) {
                        return matchNull;
                    }
                    for (int id : dimsInt) {
                        if (predicateMatchingValueIds.get(id)) {
                            return true;
                        }
                    }
                    return false;
                }

                @Override
                public void inspectRuntimeShape(RuntimeShapeInspector inspector)
                {
                    // nothing to inspect
                }
            };
        }

        @Override
        public int getValueCardinality()
        {
            return maxId;
        }

        @Override
        public String lookupName(int id)
        {
            if (id >= maxId) {
                throw new ISE("id[%d] >= maxId[%d]", id, maxId);
            }
            final String strValue = getActualValue(id, false);
            return extractionFn == null ? strValue : extractionFn.apply(strValue);
        }

        @Override
        public boolean nameLookupPossibleInAdvance()
        {
            return true;
        }

        @Nullable
        @Override
        public IdLookup idLookup()
        {
            // id lookup is impossible once values have been transformed by an extraction function
            return extractionFn == null ? this : null;
        }

        @Override
        public int lookupId(String name)
        {
            if (extractionFn != null) {
                throw new UnsupportedOperationException(
                    "cannot perform lookup when applying an extraction function"
                );
            }
            return getEncodedValue(name, false);
        }

        @SuppressWarnings("deprecation")
        @Nullable
        @Override
        public Object getObject()
        {
            IncrementalIndex.TimeAndDims key = currEntry.get();
            if (key == null) {
                return null;
            }
            Object[] dims = key.getDims();
            if (dimIndex >= dims.length) {
                return null;
            }
            return convertUnsortedEncodedKeyComponentToActualArrayOrList(
                (int[]) dims[dimIndex],
                DimensionIndexer.ARRAY
            );
        }

        @SuppressWarnings("deprecation")
        @Override
        public Class classOfObject()
        {
            return Object.class;
        }

        @Override
        public void inspectRuntimeShape(RuntimeShapeInspector inspector)
        {
            // nothing to inspect
        }
    }
    return new IndexerDimensionSelector();
}
/**
 * A string column's value selector is simply its dimension selector with the default
 * (pass-through) dimension spec.
 */
@Override
public ColumnValueSelector<?> makeColumnValueSelector(
    TimeAndDimsHolder currEntry,
    IncrementalIndex.DimensionDesc desc
)
{
    final DimensionSpec defaultSpec = DefaultDimensionSpec.of(desc.getName());
    return makeDimensionSelector(defaultSpec, currEntry, desc);
}
/**
 * Decodes an encoded row back to its value form: null for absent/empty rows, a single String for
 * one-element rows, otherwise a List or String[] (per {@code asList}) with nulls mapped back to
 * empty strings where null handling requires it.
 */
@Override
public Object convertUnsortedEncodedKeyComponentToActualArrayOrList(int[] key, boolean asList)
{
    if (key == null || key.length == 0) {
        return null;
    }
    if (key.length == 1) {
        // single-valued rows decode to a plain String (possibly null)
        return getActualValue(key[0], false);
    }
    if (asList) {
        final List<Comparable> decoded = new ArrayList<>(key.length);
        for (final int id : key) {
            decoded.add(NullHandling.nullToEmptyIfNeeded(getActualValue(id, false)));
        }
        return decoded;
    }
    final String[] decoded = new String[key.length];
    for (int i = 0; i < key.length; i++) {
        decoded[i] = NullHandling.nullToEmptyIfNeeded(getActualValue(key[i], false));
    }
    return decoded;
}
/**
 * Remaps each unsorted (arrival-order) id in the key to its position in the sorted dictionary.
 */
@Override
public int[] convertUnsortedEncodedKeyComponentToSortedEncodedKeyComponent(int[] key)
{
    final int[] sortedIds = new int[key.length];
    for (int i = 0; i < key.length; i++) {
        // ids are assigned in arrival order, so each one must go through the sorted lookup
        sortedIds[i] = getSortedEncodedValueFromUnsorted(key[i]);
    }
    return sortedIds;
}
/**
 * Records {@code rowNum} in the per-value bitmap index of every encoded id present in
 * the key component, lazily creating a bitmap the first time a value is seen.
 *
 * @throws UnsupportedOperationException if bitmap indexes are disabled for this column
 */
@Override
public void fillBitmapsFromUnsortedEncodedKeyComponent(
    int[] key, int rowNum, MutableBitmap[] bitmapIndexes, BitmapFactory factory
)
{
  if (!hasBitmapIndexes) {
    throw new UnsupportedOperationException("This column does not include bitmap indexes");
  }
  for (int encodedId : key) {
    MutableBitmap bitmap = bitmapIndexes[encodedId];
    if (bitmap == null) {
      bitmap = factory.makeEmptyMutableBitmap();
      bitmapIndexes[encodedId] = bitmap;
    }
    bitmap.add(rowNum);
  }
}
/**
 * Lazily builds and caches the sorted view of the dimension dictionary.
 * NOTE(review): appears to rely on single-threaded (or externally synchronized)
 * access during persist — confirm before adding concurrent callers.
 */
private SortedDimensionDictionary sortedLookup()
{
  if (sortedLookup == null) {
    sortedLookup = dimLookup.sort();
  }
  return sortedLookup;
}
/**
 * Decodes an encoded id to its actual string value, interpreting the id against either
 * the sorted or the unsorted dictionary depending on {@code idSorted}.
 */
private String getActualValue(int intermediateValue, boolean idSorted)
{
  return idSorted
         ? sortedLookup().getValueFromSortedId(intermediateValue)
         : dimLookup.getValue(intermediateValue);
}
/**
 * Encodes a string value to its dictionary id; when {@code idSorted} is set, the
 * unsorted id is additionally remapped into the sorted dictionary's id space.
 */
private int getEncodedValue(String fullValue, boolean idSorted)
{
  final int unsortedId = dimLookup.getId(fullValue);
  return idSorted ? sortedLookup().getSortedIdFromUnsortedId(unsortedId) : unsortedId;
}
}
| |
/*
* Copyright (c) 2016 Gridtec. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package at.gridtec.lambda4j.function.bi.obj;
import at.gridtec.lambda4j.Lambda;
import at.gridtec.lambda4j.consumer.ByteConsumer;
import at.gridtec.lambda4j.consumer.bi.obj.ObjFloatConsumer;
import at.gridtec.lambda4j.function.BooleanFunction;
import at.gridtec.lambda4j.function.ByteFunction;
import at.gridtec.lambda4j.function.CharFunction;
import at.gridtec.lambda4j.function.FloatFunction;
import at.gridtec.lambda4j.function.ShortFunction;
import at.gridtec.lambda4j.function.bi.BiFunction2;
import at.gridtec.lambda4j.function.bi.conversion.BiBooleanToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiCharToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiDoubleToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiFloatToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiIntToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiLongToByteFunction;
import at.gridtec.lambda4j.function.bi.conversion.BiShortToByteFunction;
import at.gridtec.lambda4j.function.bi.to.ToByteBiFunction;
import at.gridtec.lambda4j.function.conversion.BooleanToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ByteToCharFunction;
import at.gridtec.lambda4j.function.conversion.ByteToDoubleFunction;
import at.gridtec.lambda4j.function.conversion.ByteToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ByteToIntFunction;
import at.gridtec.lambda4j.function.conversion.ByteToLongFunction;
import at.gridtec.lambda4j.function.conversion.ByteToShortFunction;
import at.gridtec.lambda4j.function.conversion.CharToFloatFunction;
import at.gridtec.lambda4j.function.conversion.DoubleToFloatFunction;
import at.gridtec.lambda4j.function.conversion.FloatToByteFunction;
import at.gridtec.lambda4j.function.conversion.IntToFloatFunction;
import at.gridtec.lambda4j.function.conversion.LongToFloatFunction;
import at.gridtec.lambda4j.function.conversion.ShortToFloatFunction;
import at.gridtec.lambda4j.function.to.ToByteFunction;
import at.gridtec.lambda4j.function.to.ToFloatFunction;
import at.gridtec.lambda4j.operator.binary.ByteBinaryOperator;
import at.gridtec.lambda4j.operator.unary.ByteUnaryOperator;
import at.gridtec.lambda4j.operator.unary.FloatUnaryOperator;
import at.gridtec.lambda4j.predicate.BytePredicate;
import at.gridtec.lambda4j.predicate.bi.obj.ObjFloatPredicate;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnegative;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.DoubleFunction;
import java.util.function.Function;
import java.util.function.IntFunction;
import java.util.function.LongFunction;
/**
* Represents an operation that accepts one object-valued and one {@code float}-valued input argument and produces a
* {@code byte}-valued result.
* This is a (reference, float) specialization of {@link BiFunction2}.
* <p>
* This is a {@link FunctionalInterface} whose functional method is {@link #applyAsByte(Object, float)}.
*
* @param <T> The type of the first argument to the function
* @see BiFunction2
*/
@SuppressWarnings("unused")
@FunctionalInterface
public interface ObjFloatToByteFunction<T> extends Lambda {
/**
* Constructs a {@link ObjFloatToByteFunction} based on a lambda expression or a method reference. Thereby the given
* lambda expression or method reference is returned on an as-is basis to implicitly transform it to the desired
* type. With this method, it is possible to ensure that correct type is used from lambda expression or method
* reference.
*
* @param <T> The type of the first argument to the function
* @param expression A lambda expression or (typically) a method reference, e.g. {@code this::method}
* @return A {@code ObjFloatToByteFunction} from given lambda expression or method reference.
* @implNote This implementation allows the given argument to be {@code null}, but only if {@code null} given,
* {@code null} will be returned.
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html#syntax">Lambda
* Expression</a>
* @see <a href="https://docs.oracle.com/javase/tutorial/java/javaOO/methodreferences.html">Method Reference</a>
*/
static <T> ObjFloatToByteFunction<T> of(@Nullable final ObjFloatToByteFunction<T> expression) {
return expression;
}
/**
* Calls the given {@link ObjFloatToByteFunction} with the given arguments and returns its result.
*
* @param <T> The type of the first argument to the function
* @param function The function to be called
* @param t The first argument to the function
* @param value The second argument to the function
* @return The result from the given {@code ObjFloatToByteFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
static <T> byte call(@Nonnull final ObjFloatToByteFunction<? super T> function, T t, float value) {
Objects.requireNonNull(function);
return function.applyAsByte(t, value);
}
/**
* Creates a {@link ObjFloatToByteFunction} which uses the {@code first} parameter of this one as argument for the
* given {@link ToByteFunction}.
*
* @param <T> The type of the first argument to the function
* @param function The function which accepts the {@code first} parameter of this one
* @return Creates a {@code ObjFloatToByteFunction} which uses the {@code first} parameter of this one as argument
* for the given {@code ToByteFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T> ObjFloatToByteFunction<T> onlyFirst(@Nonnull final ToByteFunction<? super T> function) {
Objects.requireNonNull(function);
return (t, value) -> function.applyAsByte(t);
}
/**
* Creates a {@link ObjFloatToByteFunction} which uses the {@code second} parameter of this one as argument for the
* given {@link FloatToByteFunction}.
*
* @param <T> The type of the first argument to the function
* @param function The function which accepts the {@code second} parameter of this one
* @return Creates a {@code ObjFloatToByteFunction} which uses the {@code second} parameter of this one as argument
* for the given {@code FloatToByteFunction}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
static <T> ObjFloatToByteFunction<T> onlySecond(@Nonnull final FloatToByteFunction function) {
Objects.requireNonNull(function);
return (t, value) -> function.applyAsByte(value);
}
/**
* Creates a {@link ObjFloatToByteFunction} which always returns a given value.
*
* @param <T> The type of the first argument to the function
* @param ret The return value for the constant
* @return A {@code ObjFloatToByteFunction} which always returns a given value.
*/
@Nonnull
static <T> ObjFloatToByteFunction<T> constant(byte ret) {
return (t, value) -> ret;
}
/**
* Applies this function to the given arguments.
*
* @param t The first argument to the function
* @param value The second argument to the function
* @return The return value from the function, which is its result.
*/
byte applyAsByte(T t, float value);
/**
* Applies this function partially to some arguments of this one, producing a {@link FloatToByteFunction} as result.
*
* @param t The first argument to this function used to partially apply this function
* @return A {@code FloatToByteFunction} that represents this function partially applied the some arguments.
*/
@Nonnull
default FloatToByteFunction papplyAsByte(T t) {
return (value) -> this.applyAsByte(t, value);
}
/**
* Applies this function partially to some arguments of this one, producing a {@link ToByteFunction} as result.
*
* @param value The second argument to this function used to partially apply this function
* @return A {@code ToByteFunction} that represents this function partially applied the some arguments.
*/
@Nonnull
default ToByteFunction<T> papplyAsByte(float value) {
return (t) -> this.applyAsByte(t, value);
}
/**
* Returns the number of arguments for this function.
*
* @return The number of arguments for this function.
* @implSpec The default implementation always returns {@code 2}.
*/
@Nonnegative
default int arity() {
return 2;
}
/**
* Returns a composed {@link ToByteBiFunction} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <A> The type of the argument to the first given function, and of composed function
* @param <B> The type of the argument to the second given function, and of composed function
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code ToByteBiFunction} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to handle every type.
*/
@Nonnull
default <A, B> ToByteBiFunction<A, B> compose(@Nonnull final Function<? super A, ? extends T> before1,
@Nonnull final ToFloatFunction<? super B> before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (a, b) -> applyAsByte(before1.apply(a), before2.applyAsFloat(b));
}
/**
* Returns a composed {@link BiBooleanToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code boolean} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiBooleanToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default BiBooleanToByteFunction composeFromBoolean(@Nonnull final BooleanFunction<? extends T> before1,
@Nonnull final BooleanToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link ByteBinaryOperator} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code byte} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code ByteBinaryOperator} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default ByteBinaryOperator composeFromByte(@Nonnull final ByteFunction<? extends T> before1,
@Nonnull final ByteToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiCharToByteFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code char} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiCharToByteFunction} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default BiCharToByteFunction composeFromChar(@Nonnull final CharFunction<? extends T> before1,
@Nonnull final CharToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiDoubleToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code double} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiDoubleToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default BiDoubleToByteFunction composeFromDouble(@Nonnull final DoubleFunction<? extends T> before1,
@Nonnull final DoubleToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiFloatToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code float} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second operator to apply before this function is applied
* @return A composed {@code BiFloatToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default BiFloatToByteFunction composeFromFloat(@Nonnull final FloatFunction<? extends T> before1,
@Nonnull final FloatUnaryOperator before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiIntToByteFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code int} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiIntToByteFunction} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default BiIntToByteFunction composeFromInt(@Nonnull final IntFunction<? extends T> before1,
@Nonnull final IntToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiLongToByteFunction} that first applies the {@code before} functions to
* its input, and then applies this function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
* This method is just convenience, to provide the ability to execute an operation which accepts {@code long} input,
* before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiLongToByteFunction} that first applies the {@code before} functions to its input, and
* then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default BiLongToByteFunction composeFromLong(@Nonnull final LongFunction<? extends T> before1,
@Nonnull final LongToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link BiShortToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* execute an operation which accepts {@code short} input, before this primitive function is executed.
*
* @param before1 The first function to apply before this function is applied
* @param before2 The second function to apply before this function is applied
* @return A composed {@code BiShortToByteFunction} that first applies the {@code before} functions to its input,
* and then applies this function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to handle primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default BiShortToByteFunction composeFromShort(@Nonnull final ShortFunction<? extends T> before1,
@Nonnull final ShortToFloatFunction before2) {
Objects.requireNonNull(before1);
Objects.requireNonNull(before2);
return (value1, value2) -> applyAsByte(before1.apply(value1), before2.applyAsFloat(value2));
}
/**
* Returns a composed {@link ObjFloatFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* If evaluation of either operation throws an exception, it is relayed to the caller of the composed operation.
*
* @param <S> The type of return value from the {@code after} function, and of the composed function
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatFunction} that first applies this function to its input, and then applies the
* {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is able to return every type.
*/
@Nonnull
default <S> ObjFloatFunction<T, S> andThen(@Nonnull final ByteFunction<? extends S> after) {
Objects.requireNonNull(after);
return (t, value) -> after.apply(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatPredicate} that first applies this function to its input, and then applies the
* {@code after} predicate to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code boolean}.
*
* @param after The predicate to apply after this function is applied
* @return A composed {@code ObjFloatPredicate} that first applies this function to its input, and then applies the
* {@code after} predicate to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* boolean}.
*/
@Nonnull
default ObjFloatPredicate<T> andThenToBoolean(@Nonnull final BytePredicate after) {
Objects.requireNonNull(after);
return (t, value) -> after.test(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToByteFunction} that first applies this function to its input, and then applies
* the {@code after} operator to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code byte}.
*
* @param after The operator to apply after this function is applied
* @return A composed {@code ObjFloatToByteFunction} that first applies this function to its input, and then applies
* the {@code after} operator to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* byte}.
*/
@Nonnull
default ObjFloatToByteFunction<T> andThenToByte(@Nonnull final ByteUnaryOperator after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsByte(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToCharFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code char}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToCharFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* char}.
*/
@Nonnull
default ObjFloatToCharFunction<T> andThenToChar(@Nonnull final ByteToCharFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsChar(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToDoubleFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code double}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToDoubleFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* double}.
*/
@Nonnull
default ObjFloatToDoubleFunction<T> andThenToDouble(@Nonnull final ByteToDoubleFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsDouble(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToFloatFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code float}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToFloatFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* float}.
*/
@Nonnull
default ObjFloatToFloatFunction<T> andThenToFloat(@Nonnull final ByteToFloatFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsFloat(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToIntFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code int}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToIntFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* int}.
*/
@Nonnull
default ObjFloatToIntFunction<T> andThenToInt(@Nonnull final ByteToIntFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsInt(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToLongFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result. If evaluation of either operation throws an exception, it is relayed to
* the caller of the composed operation. This method is just convenience, to provide the ability to transform this
* primitive function to an operation returning {@code long}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToLongFunction} that first applies this function to its input, and then applies
* the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* long}.
*/
@Nonnull
default ObjFloatToLongFunction<T> andThenToLong(@Nonnull final ByteToLongFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsLong(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatToShortFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result. If evaluation of either operation throws an exception, it is
* relayed to the caller of the composed operation. This method is just convenience, to provide the ability to
* transform this primitive function to an operation returning {@code short}.
*
* @param after The function to apply after this function is applied
* @return A composed {@code ObjFloatToShortFunction} that first applies this function to its input, and then
* applies the {@code after} function to the result.
* @throws NullPointerException If given argument is {@code null}
* @implSpec The input argument of this method is a able to return primitive values. In this case this is {@code
* short}.
*/
@Nonnull
default ObjFloatToShortFunction<T> andThenToShort(@Nonnull final ByteToShortFunction after) {
Objects.requireNonNull(after);
return (t, value) -> after.applyAsShort(applyAsByte(t, value));
}
/**
* Returns a composed {@link ObjFloatConsumer} that fist applies this function to its input, and then consumes the
* result using the given {@link ByteConsumer}. If evaluation of either operation throws an exception, it is relayed
* to the caller of the composed operation.
*
* @param consumer The operation which consumes the result from this operation
* @return A composed {@code ObjFloatConsumer} that first applies this function to its input, and then consumes the
* result using the given {@code ByteConsumer}.
* @throws NullPointerException If given argument is {@code null}
*/
@Nonnull
default ObjFloatConsumer<T> consume(@Nonnull final ByteConsumer consumer) {
Objects.requireNonNull(consumer);
return (t, value) -> consumer.accept(applyAsByte(t, value));
}
/**
 * Returns a memoized (caching) version of this {@link ObjFloatToByteFunction}. Whenever it is called, the mapping
 * between the input parameters and the return value is preserved in a cache, making subsequent calls returning the
 * memoized value instead of computing the return value again.
 * <p>
 * Unless the function and therefore the used cache will be garbage-collected, it will keep all memoized values
 * forever.
 *
 * @return A memoized (caching) version of this {@code ObjFloatToByteFunction}.
 * @implSpec This implementation does not allow the input parameters or return value to be {@code null} for the
 * resulting memoized function, as the cache used internally does not permit {@code null} keys or values.
 * @implNote The returned memoized function can be safely used concurrently from multiple threads which makes it
 * thread-safe.
 */
@Nonnull
default ObjFloatToByteFunction<T> memoized() {
    // Guard clause: an already-memoized function is returned as-is to avoid double caching.
    if (isMemoized()) {
        return this;
    }
    final Map<Pair<T, Float>, Byte> memo = new ConcurrentHashMap<>();
    final Object mutex = new Object();
    // Intersection cast tags the returned lambda as Memoized so isMemoized() reports true.
    return (ObjFloatToByteFunction<T> & Memoized) (t, value) -> {
        final byte result;
        synchronized (mutex) {
            result = memo.computeIfAbsent(Pair.of(t, value),
                    k -> applyAsByte(k.getLeft(), k.getRight()));
        }
        return result;
    };
}
/**
 * Returns a composed {@link BiFunction2} which represents this {@link ObjFloatToByteFunction}. Thereby the
 * primitive input argument for this function is autoboxed. This method provides the possibility to use this {@code
 * ObjFloatToByteFunction} with methods provided by the {@code JDK}.
 *
 * @return A composed {@code BiFunction2} which represents this {@code ObjFloatToByteFunction}.
 */
@Nonnull
default BiFunction2<T, Float, Byte> boxed() {
    // Explicit lambda; unboxing of the Float argument happens automatically.
    return (t, value) -> applyAsByte(t, value);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.EnumSet;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoStriped;
import org.apache.hadoop.hdfs.protocol.BlockType;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.junit.Assert;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager.Lease;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.hdfs.server.namenode.FsImageProto.INodeSection;
import org.apache.hadoop.hdfs.util.MD5FileUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.Time;
import org.junit.Test;
import static org.junit.Assert.assertArrayEquals;
public class TestFSImage {
private static final String HADOOP_2_7_ZER0_BLOCK_SIZE_TGZ =
"image-with-zero-block-size.tar.gz";
private static final ErasureCodingPolicy testECPolicy =
SystemErasureCodingPolicies.getByID(
SystemErasureCodingPolicies.RS_10_4_POLICY_ID);
/** Round-trips a small namespace through an uncompressed fsimage. */
@Test
public void testPersist() throws IOException {
  testPersistHelper(new Configuration());
}
/**
 * Round-trips the namespace through a compressed fsimage, once per
 * supported compression codec, in the same order as before.
 */
@Test
public void testCompression() throws IOException {
  Configuration conf = new Configuration();
  conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
  final String[] codecs = {
      "org.apache.hadoop.io.compress.DefaultCodec",
      "org.apache.hadoop.io.compress.GzipCodec",
      "org.apache.hadoop.io.compress.BZip2Codec",
      "org.apache.hadoop.io.compress.Lz4Codec"
  };
  for (String codec : codecs) {
    setCompressCodec(conf, codec);
  }
}
/**
 * Configures the given fsimage compression codec and then runs the
 * persist/restore round trip with it.
 *
 * @param conf configuration with image compression already enabled
 * @param compressCodec fully-qualified codec class name
 * @throws IOException if the save/restore round trip fails
 */
private void setCompressCodec(Configuration conf, String compressCodec)
throws IOException {
conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY, compressCodec);
testPersistHelper(conf);
}
/**
 * Creates a small namespace (a directory, a closed file and an
 * under-construction file), saves it to an fsimage, restarts the NameNode,
 * and verifies that directories, files, blocks and leases were restored
 * from the persisted image.
 *
 * @param conf configuration for the mini cluster (callers toggle image
 *             compression settings)
 * @throws IOException if cluster or filesystem operations fail
 */
private void testPersistHelper(Configuration conf) throws IOException {
  MiniDFSCluster cluster = null;
  try {
    cluster = new MiniDFSCluster.Builder(conf).build();
    cluster.waitActive();
    FSNamesystem fsn = cluster.getNamesystem();
    DistributedFileSystem fs = cluster.getFileSystem();
    final Path dir = new Path("/abc/def");
    final Path file1 = new Path(dir, "f1");
    final Path file2 = new Path(dir, "f2");
    // create an empty file f1
    fs.create(file1).close();
    // create an under-construction file f2 with data flushed but not closed
    FSDataOutputStream out = fs.create(file2);
    out.writeBytes("hello");
    ((DFSOutputStream) out.getWrappedStream()).hsync(EnumSet
        .of(SyncFlag.UPDATE_LENGTH));
    // checkpoint: save the namespace while in safe mode
    fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
    fs.saveNamespace();
    fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
    cluster.restartNameNode();
    cluster.waitActive();
    fs = cluster.getFileSystem();
    // FIX: re-fetch the namesystem after restart. The pre-restart
    // FSNamesystem reference would query the old in-memory state rather
    // than the state actually loaded back from the saved image, so the
    // INode and lease checks below would not verify persistence.
    fsn = cluster.getNamesystem();
    assertTrue(fs.isDirectory(dir));
    assertTrue(fs.exists(file1));
    assertTrue(fs.exists(file2));
    // check internals of file2: size, UC state and block state must survive
    INodeFile file2Node = fsn.dir.getINode4Write(file2.toString()).asFile();
    assertEquals("hello".length(), file2Node.computeFileSize());
    assertTrue(file2Node.isUnderConstruction());
    BlockInfo[] blks = file2Node.getBlocks();
    assertEquals(1, blks.length);
    assertEquals(BlockUCState.UNDER_CONSTRUCTION, blks[0].getBlockUCState());
    // check lease manager: the lease for the UC file must be re-created
    Lease lease = fsn.leaseManager.getLease(file2Node);
    Assert.assertNotNull(lease);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * Builds an INodeFile holding three striped blocks, serializes it with
 * FSImageSerialization (optionally as an under-construction file), loads it
 * back, and asserts that the persisted attributes survive the round trip.
 *
 * @param fsn  namesystem used for EC policy setup and for loading
 * @param conf configuration handed to the legacy image loader
 * @param isUC whether to serialize the file as under-construction
 */
private void testSaveAndLoadStripedINodeFile(FSNamesystem fsn, Configuration conf,
boolean isUC) throws IOException{
// Construct an INode with StripedBlock for saving and loading
fsn.setErasureCodingPolicy("/", testECPolicy.getName(), false);
long id = 123456789;
byte[] name = "testSaveAndLoadInodeFile_testfile".getBytes();
// NOTE(review): 0x755 is hexadecimal, not octal 0755 — harmless here since
// the value is only round-tripped, but confirm the intent.
PermissionStatus permissionStatus = new PermissionStatus("testuser_a",
"testuser_groups", new FsPermission((short)0x755));
long mtime = 1426222916-3600;
long atime = 1426222916;
BlockInfoContiguous[] blocks = new BlockInfoContiguous[0];
byte erasureCodingPolicyID = testECPolicy.getId();
long preferredBlockSize = 128*1024*1024;
INodeFile file = new INodeFile(id, name, permissionStatus, mtime, atime,
blocks, null, erasureCodingPolicyID, preferredBlockSize,
(byte) 0, BlockType.STRIPED);
ByteArrayOutputStream bs = new ByteArrayOutputStream();
// Construct StripedBlocks for the INode
BlockInfoStriped[] stripedBlocks = new BlockInfoStriped[3];
long stripedBlkId = 10000001;
long timestamp = mtime+3600;
for (int i = 0; i < stripedBlocks.length; i++) {
stripedBlocks[i] = new BlockInfoStriped(
new Block(stripedBlkId + i, preferredBlockSize, timestamp),
testECPolicy);
file.addBlock(stripedBlocks[i]);
}
final String client = "testClient";
final String clientMachine = "testClientMachine";
final String path = "testUnderConstructionPath";
// Save the INode to byte array
DataOutput out = new DataOutputStream(bs);
if (isUC) {
file.toUnderConstruction(client, clientMachine);
FSImageSerialization.writeINodeUnderConstruction((DataOutputStream) out,
file, path);
} else {
FSImageSerialization.writeINodeFile(file, out, false);
}
DataInput in = new DataInputStream(
new ByteArrayInputStream(bs.toByteArray()));
// load the INode from the byte array
INodeFile fileByLoaded;
if (isUC) {
fileByLoaded = FSImageSerialization.readINodeUnderConstruction(in,
fsn, fsn.getFSImage().getLayoutVersion());
} else {
fileByLoaded = (INodeFile) new FSImageFormat.Loader(conf, fsn)
.loadINodeWithLocalName(false, in, false);
}
assertEquals(id, fileByLoaded.getId() );
// UC files are serialized under 'path'; completed files keep 'name'
assertArrayEquals(isUC ? path.getBytes() : name,
fileByLoaded.getLocalName().getBytes());
assertEquals(permissionStatus.getUserName(),
fileByLoaded.getPermissionStatus().getUserName());
assertEquals(permissionStatus.getGroupName(),
fileByLoaded.getPermissionStatus().getGroupName());
assertEquals(permissionStatus.getPermission(),
fileByLoaded.getPermissionStatus().getPermission());
assertEquals(mtime, fileByLoaded.getModificationTime());
// The UC serialization path does not persist atime separately
assertEquals(isUC ? mtime : atime, fileByLoaded.getAccessTime());
// TODO for striped blocks, we currently save and load them as contiguous
// blocks to/from legacy fsimage
assertEquals(3, fileByLoaded.getBlocks().length);
assertEquals(preferredBlockSize, fileByLoaded.getPreferredBlockSize());
assertEquals(file.getFileReplication(), fileByLoaded.getFileReplication());
if (isUC) {
assertEquals(client,
fileByLoaded.getFileUnderConstructionFeature().getClientName());
assertEquals(clientMachine,
fileByLoaded.getFileUnderConstructionFeature().getClientMachine());
}
}
/**
 * Test if a INodeFile with BlockInfoStriped can be saved by
 * FSImageSerialization and loaded by FSImageFormat#Loader.
 */
@Test
public void testSaveAndLoadStripedINodeFile() throws IOException{
  Configuration conf = new Configuration();
  DFSTestUtil.enableAllECPolicies(conf);
  MiniDFSCluster dfsCluster = null;
  try {
    dfsCluster = new MiniDFSCluster.Builder(conf).build();
    dfsCluster.waitActive();
    // Exercise the completed-file (non-UC) serialization path.
    testSaveAndLoadStripedINodeFile(dfsCluster.getNamesystem(), conf, false);
  } finally {
    if (dfsCluster != null) {
      dfsCluster.shutdown();
    }
  }
}
/**
 * Test if a INodeFileUnderConstruction with BlockInfoStriped can be
 * saved and loaded by FSImageSerialization
 */
@Test
public void testSaveAndLoadStripedINodeFileUC() throws IOException {
  Configuration conf = new Configuration();
  DFSTestUtil.enableAllECPolicies(conf);
  MiniDFSCluster dfsCluster = null;
  try {
    dfsCluster = new MiniDFSCluster.Builder(conf).build();
    dfsCluster.waitActive();
    // Exercise the under-construction serialization path.
    testSaveAndLoadStripedINodeFile(dfsCluster.getNamesystem(), conf, true);
  } finally {
    if (dfsCluster != null) {
      dfsCluster.shutdown();
    }
  }
}
/**
 * On checkpointing, a stale fsimage.ckpt file left over in the secondary
 * NameNode's image directory should be deleted by the next checkpoint.
 */
@Test
public void testRemovalStaleFsimageCkpt() throws IOException {
MiniDFSCluster cluster = null;
SecondaryNameNode secondary = null;
Configuration conf = new HdfsConfiguration();
try {
cluster = new MiniDFSCluster.Builder(conf).
numDataNodes(1).format(true).build();
// Bind the 2NN HTTP server to an ephemeral port to avoid conflicts.
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
"0.0.0.0:0");
secondary = new SecondaryNameNode(conf);
// Do checkpointing
secondary.doCheckpoint();
NNStorage storage = secondary.getFSImage().storage;
File currentDir = FSImageTestUtil.
getCurrentDirs(storage, NameNodeDirType.IMAGE).get(0);
// Create a stale fsimage.ckpt file
File staleCkptFile = new File(currentDir.getPath() +
"/fsimage.ckpt_0000000000000000002");
staleCkptFile.createNewFile();
assertTrue(staleCkptFile.exists());
// After checkpoint stale fsimage.ckpt file should be deleted
secondary.doCheckpoint();
assertFalse(staleCkptFile.exists());
} finally {
if (secondary != null) {
secondary.shutdown();
secondary = null;
}
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
}
/**
 * Ensure that the MD5 digest written by the image saver equals the digest
 * recomputed from the fsimage file on disk.
 */
@Test
public void testDigest() throws IOException {
Configuration conf = new Configuration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
DistributedFileSystem fs = cluster.getFileSystem();
// Saving the namespace requires safe mode.
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
File currentDir = FSImageTestUtil.getNameNodeCurrentDirs(cluster, 0).get(
0);
File fsimage = FSImageTestUtil.findNewestImageFile(currentDir
.getAbsolutePath());
// Stored sidecar digest must match a fresh digest of the file contents.
assertEquals(MD5FileUtils.readStoredMd5ForFile(fsimage),
MD5FileUtils.computeMd5ForFile(fsimage));
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Ensure mtime and atime of files, directories and symlinks can be loaded
 * back from the fsimage after a save-namespace and cluster restart.
 */
@Test(timeout=60000)
public void testLoadMtimeAtime() throws Exception {
Configuration conf = new Configuration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
cluster.waitActive();
DistributedFileSystem hdfs = cluster.getFileSystem();
String userDir = hdfs.getHomeDirectory().toUri().getPath().toString();
Path file = new Path(userDir, "file");
// NOTE(review): "/dir" and "/link" are absolute child paths, so these
// resolve to the filesystem root rather than under userDir — confirm
// whether that placement is intentional.
Path dir = new Path(userDir, "/dir");
Path link = new Path(userDir, "/link");
hdfs.createNewFile(file);
hdfs.mkdirs(dir);
hdfs.createSymlink(file, link, false);
// Record timestamps before the save/restart round trip.
long mtimeFile = hdfs.getFileStatus(file).getModificationTime();
long atimeFile = hdfs.getFileStatus(file).getAccessTime();
long mtimeDir = hdfs.getFileStatus(dir).getModificationTime();
long mtimeLink = hdfs.getFileLinkStatus(link).getModificationTime();
long atimeLink = hdfs.getFileLinkStatus(link).getAccessTime();
// save namespace and restart cluster
hdfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_ENTER);
hdfs.saveNamespace();
hdfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_LEAVE);
cluster.shutdown();
cluster = new MiniDFSCluster.Builder(conf).format(false)
.numDataNodes(1).build();
cluster.waitActive();
hdfs = cluster.getFileSystem();
// Timestamps after the restart must equal those recorded above.
assertEquals(mtimeFile, hdfs.getFileStatus(file).getModificationTime());
assertEquals(atimeFile, hdfs.getFileStatus(file).getAccessTime());
assertEquals(mtimeDir, hdfs.getFileStatus(dir).getModificationTime());
assertEquals(mtimeLink, hdfs.getFileLinkStatus(link).getModificationTime());
assertEquals(atimeLink, hdfs.getFileLinkStatus(link).getAccessTime());
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Ensure ctime is set during namenode formatting.
 */
@Test(timeout=60000)
public void testCtime() throws Exception {
  Configuration conf = new Configuration();
  MiniDFSCluster cluster = null;
  try {
    final long beforeFormat = Time.now();
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    cluster.waitActive();
    final long afterFormat = Time.now();
    // The namespace creation time must fall inside the formatting window.
    final long ctime = cluster.getNamesystem().getCTime();
    assertTrue(beforeFormat <= ctime);
    assertTrue(ctime <= afterFormat);
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
/**
 * In this test case, I have created an image with a file having
 * preferredblockSize = 0. We are trying to read this image (since file with
 * preferredblockSize = 0 was allowed pre 2.1.0-beta version. The namenode
 * after 2.6 version will not be able to read this particular file.
 * See HDFS-7788 for more information.
 * @throws Exception
 */
@Test
public void testZeroBlockSize() throws Exception {
final Configuration conf = new HdfsConfiguration();
String tarFile = System.getProperty("test.cache.data", "build/test/cache")
+ "/" + HADOOP_2_7_ZER0_BLOCK_SIZE_TGZ;
String testDir = PathUtils.getTestDirName(getClass());
File dfsDir = new File(testDir, "image-with-zero-block-size");
// Start from a clean directory so the unpacked image is the only content.
if (dfsDir.exists() && !FileUtil.fullyDelete(dfsDir)) {
throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
}
FileUtil.unTar(new File(tarFile), new File(testDir));
File nameDir = new File(dfsDir, "name");
GenericTestUtils.assertExists(nameDir);
conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
nameDir.getAbsolutePath());
// Start on the pre-built image without reformatting; UPGRADE triggers the
// legacy-image read path under test.
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1)
.format(false)
.manageDataDfsDirs(false)
.manageNameDfsDirs(false)
.waitSafeMode(false).startupOption(StartupOption.UPGRADE)
.build();
try {
FileSystem fs = cluster.getFileSystem();
Path testPath = new Path("/tmp/zeroBlockFile");
assertTrue("File /tmp/zeroBlockFile doesn't exist ", fs.exists(testPath));
assertTrue("Name node didn't come up", cluster.isNameNodeUp(0));
} finally {
cluster.shutdown();
//Clean up
FileUtil.fullyDelete(dfsDir);
}
}
/**
 * Ensure that FSImage supports BlockGroup: striped files written under two
 * different EC policies must keep their policy, block layout and contents
 * across a save-namespace and NameNode restart.
 */
@Test(timeout = 60000)
public void testSupportBlockGroup() throws Exception {
final short GROUP_SIZE = (short) (testECPolicy.getNumDataUnits() +
testECPolicy.getNumParityUnits());
final int BLOCK_SIZE = 8 * 1024 * 1024;
Configuration conf = new HdfsConfiguration();
conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
DFSTestUtil.enableAllECPolicies(conf);
MiniDFSCluster cluster = null;
try {
// One DataNode per EC unit so a full block group can be placed.
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(GROUP_SIZE)
.build();
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
Path parentDir = new Path("/ec-10-4");
Path childDir = new Path(parentDir, "ec-3-2");
ErasureCodingPolicy ec32Policy = SystemErasureCodingPolicies
.getByID(SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
// Create directories and files
fs.mkdirs(parentDir);
fs.mkdirs(childDir);
// Child dir overrides the parent's RS-10-4 policy with RS-3-2.
fs.setErasureCodingPolicy(parentDir, testECPolicy.getName());
fs.setErasureCodingPolicy(childDir, ec32Policy.getName());
Path file_10_4 = new Path(parentDir, "striped_file_10_4");
Path file_3_2 = new Path(childDir, "striped_file_3_2");
// Write content to files
byte[] bytes = StripedFileTestUtil.generateBytes(BLOCK_SIZE);
DFSTestUtil.writeFile(fs, file_10_4, new String(bytes));
DFSTestUtil.writeFile(fs, file_3_2, new String(bytes));
// Save namespace and restart NameNode
fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
fs.saveNamespace();
fs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
cluster.restartNameNodes();
fs = cluster.getFileSystem();
assertTrue(fs.exists(file_10_4));
assertTrue(fs.exists(file_3_2));
// check the information of file_10_4
FSNamesystem fsn = cluster.getNamesystem();
INodeFile inode = fsn.dir.getINode(file_10_4.toString()).asFile();
assertTrue(inode.isStriped());
assertEquals(testECPolicy.getId(), inode.getErasureCodingPolicyID());
BlockInfo[] blks = inode.getBlocks();
assertEquals(1, blks.length);
assertTrue(blks[0].isStriped());
assertEquals(testECPolicy.getId(),
fs.getErasureCodingPolicy(file_10_4).getId());
assertEquals(testECPolicy.getId(),
((BlockInfoStriped)blks[0]).getErasureCodingPolicy().getId());
assertEquals(testECPolicy.getNumDataUnits(),
((BlockInfoStriped) blks[0]).getDataBlockNum());
assertEquals(testECPolicy.getNumParityUnits(),
((BlockInfoStriped) blks[0]).getParityBlockNum());
// File contents must survive the restart byte-for-byte.
byte[] content = DFSTestUtil.readFileAsBytes(fs, file_10_4);
assertArrayEquals(bytes, content);
// check the information of file_3_2
inode = fsn.dir.getINode(file_3_2.toString()).asFile();
assertTrue(inode.isStriped());
assertEquals(SystemErasureCodingPolicies.getByID(
SystemErasureCodingPolicies.RS_3_2_POLICY_ID).getId(),
inode.getErasureCodingPolicyID());
blks = inode.getBlocks();
assertEquals(1, blks.length);
assertTrue(blks[0].isStriped());
assertEquals(ec32Policy.getId(),
fs.getErasureCodingPolicy(file_3_2).getId());
assertEquals(ec32Policy.getNumDataUnits(),
((BlockInfoStriped) blks[0]).getDataBlockNum());
assertEquals(ec32Policy.getNumParityUnits(),
((BlockInfoStriped) blks[0]).getParityBlockNum());
content = DFSTestUtil.readFileAsBytes(fs, file_3_2);
assertArrayEquals(bytes, content);
// check the EC policy on parent Dir
ErasureCodingPolicy ecPolicy =
fsn.getErasureCodingPolicy(parentDir.toString());
assertNotNull(ecPolicy);
assertEquals(testECPolicy.getId(), ecPolicy.getId());
// check the EC policy on child Dir
ecPolicy = fsn.getErasureCodingPolicy(childDir.toString());
assertNotNull(ecPolicy);
assertEquals(ec32Policy.getId(), ecPolicy.getId());
// check the EC policy on root directory: none was ever set there
ecPolicy = fsn.getErasureCodingPolicy("/");
assertNull(ecPolicy);
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * A completed (non-EC) file holding a block whose ID falls into the striped
 * block ID range must set the BlockManager's hasNonEcBlockUsingStripedID
 * flag after an fsimage reload, and the flag must clear once the file is
 * deleted and the image is saved/reloaded again.
 */
@Test
public void testHasNonEcBlockUsingStripedIDForLoadFile() throws IOException{
// start a cluster
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(9)
.build();
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
FSNamesystem fns = cluster.getNamesystem();
String testDir = "/test_block_manager";
String testFile = "testfile_loadfile";
String testFilePath = testDir + "/" + testFile;
String clientName = "testUser_loadfile";
String clientMachine = "testMachine_loadfile";
// A negative block ID lies in the range reserved for striped blocks.
long blkId = -1;
long blkNumBytes = 1024;
long timestamp = 1426222918;
fs.mkdir(new Path(testDir), new FsPermission("755"));
Path p = new Path(testFilePath);
DFSTestUtil.createFile(fs, p, 0, (short) 1, 1);
// Attach a contiguous (non-EC) block that uses the striped-range ID.
BlockInfoContiguous cBlk = new BlockInfoContiguous(
new Block(blkId, blkNumBytes, timestamp), (short)3);
INodeFile file = (INodeFile)fns.getFSDirectory().getINode(testFilePath);
file.toUnderConstruction(clientName, clientMachine);
file.addBlock(cBlk);
TestINodeFile.toCompleteFile(file);
fns.enterSafeMode(false);
fns.saveNamespace(0, 0);
cluster.restartNameNodes();
cluster.waitActive();
fns = cluster.getNamesystem();
assertTrue(fns.getBlockManager().hasNonEcBlockUsingStripedID());
//after nonEcBlockUsingStripedID is deleted
//the hasNonEcBlockUsingStripedID is set to false
fs = cluster.getFileSystem();
fs.delete(p,false);
fns.enterSafeMode(false);
fns.saveNamespace(0, 0);
cluster.restartNameNodes();
cluster.waitActive();
fns = cluster.getNamesystem();
assertFalse(fns.getBlockManager().hasNonEcBlockUsingStripedID());
cluster.shutdown();
cluster = null;
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * Same as {@code testHasNonEcBlockUsingStripedIDForLoadFile} but the file is
 * left under construction: loading the UC file from the fsimage must still
 * set the hasNonEcBlockUsingStripedID flag.
 */
@Test
public void testHasNonEcBlockUsingStripedIDForLoadUCFile()
throws IOException{
// start a cluster
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(9)
.build();
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
FSNamesystem fns = cluster.getNamesystem();
String testDir = "/test_block_manager";
String testFile = "testfile_loaducfile";
String testFilePath = testDir + "/" + testFile;
String clientName = "testUser_loaducfile";
String clientMachine = "testMachine_loaducfile";
// A negative block ID lies in the range reserved for striped blocks.
long blkId = -1;
long blkNumBytes = 1024;
long timestamp = 1426222918;
fs.mkdir(new Path(testDir), new FsPermission("755"));
Path p = new Path(testFilePath);
DFSTestUtil.createFile(fs, p, 0, (short) 1, 1);
BlockInfoContiguous cBlk = new BlockInfoContiguous(
new Block(blkId, blkNumBytes, timestamp), (short)3);
INodeFile file = (INodeFile)fns.getFSDirectory().getINode(testFilePath);
// Keep the file under construction (no toCompleteFile here).
file.toUnderConstruction(clientName, clientMachine);
file.addBlock(cBlk);
fns.enterSafeMode(false);
fns.saveNamespace(0, 0);
cluster.restartNameNodes();
cluster.waitActive();
fns = cluster.getNamesystem();
assertTrue(fns.getBlockManager().hasNonEcBlockUsingStripedID());
cluster.shutdown();
cluster = null;
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * A non-EC block with a striped-range ID that survives only in a snapshot
 * (the live file is truncated) must still set the
 * hasNonEcBlockUsingStripedID flag when the fsimage is reloaded.
 */
@Test
public void testHasNonEcBlockUsingStripedIDForLoadSnapshot()
throws IOException{
// start a cluster
Configuration conf = new HdfsConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(9)
.build();
cluster.waitActive();
DistributedFileSystem fs = cluster.getFileSystem();
FSNamesystem fns = cluster.getNamesystem();
String testDir = "/test_block_manager";
String testFile = "testfile_loadSnapshot";
String testFilePath = testDir + "/" + testFile;
String clientName = "testUser_loadSnapshot";
String clientMachine = "testMachine_loadSnapshot";
// A negative block ID lies in the range reserved for striped blocks.
long blkId = -1;
long blkNumBytes = 1024;
long timestamp = 1426222918;
Path d = new Path(testDir);
fs.mkdir(d, new FsPermission("755"));
fs.allowSnapshot(d);
Path p = new Path(testFilePath);
DFSTestUtil.createFile(fs, p, 0, (short) 1, 1);
BlockInfoContiguous cBlk = new BlockInfoContiguous(
new Block(blkId, blkNumBytes, timestamp), (short)3);
INodeFile file = (INodeFile)fns.getFSDirectory().getINode(testFilePath);
file.toUnderConstruction(clientName, clientMachine);
file.addBlock(cBlk);
TestINodeFile.toCompleteFile(file);
// Snapshot first, then truncate so the block only lives in the snapshot.
fs.createSnapshot(d,"testHasNonEcBlockUsingStripeID");
fs.truncate(p,0);
fns.enterSafeMode(false);
fns.saveNamespace(0, 0);
cluster.restartNameNodes();
cluster.waitActive();
fns = cluster.getNamesystem();
assertTrue(fns.getBlockManager().hasNonEcBlockUsingStripedID());
cluster.shutdown();
cluster = null;
} finally {
if (cluster != null) {
cluster.shutdown();
}
}
}
/**
 * An {@code INodeSection.INodeFile} proto built without an explicit block
 * type must deserialize to {@link BlockType#CONTIGUOUS}, preserving
 * compatibility with images written before striped blocks existed.
 */
@Test
public void testBlockTypeProtoDefaultsToContiguous() throws Exception {
  INodeSection.INodeFile.Builder builder = INodeSection.INodeFile
      .newBuilder();
  INodeSection.INodeFile inodeFile = builder.build();
  BlockType defaultBlockType = PBHelperClient.convert(inodeFile
      .getBlockType());
  // FIX: JUnit's assertEquals takes (expected, actual); the original call
  // had them reversed, which yields a misleading failure message.
  assertEquals(BlockType.CONTIGUOUS, defaultBlockType);
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.segment;
import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import io.druid.data.input.MapBasedRow;
import io.druid.data.input.Row;
import io.druid.jackson.DefaultObjectMapper;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.guava.Sequence;
import io.druid.math.expr.ExprMacroTable;
import io.druid.query.Result;
import io.druid.query.expression.TestExprMacroTable;
import io.druid.query.timeseries.TimeseriesResultValue;
import io.druid.query.topn.TopNResultValue;
import io.druid.segment.column.ColumnConfig;
import io.druid.segment.writeout.SegmentWriteOutMediumFactory;
import io.druid.timeline.DataSegment;
import org.junit.Assert;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
*/
public class TestHelper
{
private static final ObjectMapper JSON_MAPPER = makeJsonMapper();
/** Builds an {@link IndexMergerV9} wired with the shared test JSON mapper and IO. */
public static IndexMergerV9 getTestIndexMergerV9(SegmentWriteOutMediumFactory segmentWriteOutMediumFactory)
{
  final IndexIO indexIO = getTestIndexIO(segmentWriteOutMediumFactory);
  return new IndexMergerV9(JSON_MAPPER, indexIO, segmentWriteOutMediumFactory);
}
/** Builds an {@link IndexIO} whose column cache is disabled (size 0) for tests. */
public static IndexIO getTestIndexIO(SegmentWriteOutMediumFactory segmentWriteOutMediumFactory)
{
  final ColumnConfig noColumnCache = new ColumnConfig()
  {
    @Override
    public int columnCacheSizeBytes()
    {
      return 0;
    }
  };
  return new IndexIO(JSON_MAPPER, segmentWriteOutMediumFactory, noColumnCache);
}
/**
 * Builds the JSON {@link ObjectMapper} used by tests, registering the
 * injectable values (macro table, the mapper itself, and the load-spec
 * pruning holder) that deserialization of test fixtures expects.
 */
public static ObjectMapper makeJsonMapper()
{
  final ObjectMapper mapper = new DefaultObjectMapper();
  final InjectableValues.Std injectables = new InjectableValues.Std();
  injectables.addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE);
  injectables.addValue(ObjectMapper.class.getName(), mapper);
  injectables.addValue(DataSegment.PruneLoadSpecHolder.class, DataSegment.PruneLoadSpecHolder.DEFAULT);
  mapper.setInjectableValues(injectables);
  return mapper;
}
/**
 * Builds an {@link ObjectMapper} with the injectable values tests expect.
 * <p>
 * NOTE(review): despite the name, this constructs a plain
 * {@link DefaultObjectMapper} (JSON); no Smile factory is involved here —
 * confirm whether a Smile-backed mapper was intended.
 */
public static ObjectMapper makeSmileMapper()
{
final ObjectMapper mapper = new DefaultObjectMapper();
mapper.setInjectableValues(
new InjectableValues.Std()
.addValue(ExprMacroTable.class.getName(), TestExprMacroTable.INSTANCE)
.addValue(ObjectMapper.class.getName(), mapper)
);
return mapper;
}
/** Returns the elements of {@code input} in reverse order (input is copied, not mutated). */
public static <T> Iterable<T> revert(Iterable<T> input)
{
  final List<T> copy = Lists.newArrayList(input);
  return Lists.reverse(copy);
}
/** Materializes the sequence and compares it against the expected results with an empty message prefix. */
public static <T> void assertExpectedResults(Iterable<Result<T>> expectedResults, Sequence<Result<T>> results)
{
assertResults(expectedResults, results.toList(), "");
}
/** Compares actual results against the expected results with an empty message prefix. */
public static <T> void assertExpectedResults(Iterable<Result<T>> expectedResults, Iterable<Result<T>> results)
{
assertResults(expectedResults, results, "");
}
/** Compares actual results against the expected results, prefixing failures with {@code failMsg}. */
public static <T> void assertExpectedResults(
Iterable<Result<T>> expectedResults,
Iterable<Result<T>> results,
String failMsg
)
{
assertResults(expectedResults, results, failMsg);
}
/** Compares arbitrary objects element-by-element, prefixing failures with {@code failMsg}. */
public static <T> void assertExpectedObjects(Iterable<T> expectedResults, Iterable<T> results, String failMsg)
{
assertObjects(expectedResults, results, failMsg);
}
/** Materializes the sequence and compares its elements, prefixing failures with {@code failMsg}. */
public static <T> void assertExpectedObjects(Iterable<T> expectedResults, Sequence<T> results, String failMsg)
{
assertObjects(expectedResults, results.toList(), failMsg);
}
/**
 * Compares expected results against actual results, walking the actual
 * iterable with two independent iterators to also verify that
 * {@code iterator()} can safely be called more than once. Rows and
 * timeseries/topN result values get fuzzy numeric comparison; everything
 * else uses plain equality. Fails if any iterator has leftover elements.
 *
 * @param expectedResults expected elements, in order
 * @param actualResults   produced elements, in order
 * @param failMsg         prefix for failure messages
 */
private static <T> void assertResults(
    Iterable<Result<T>> expectedResults,
    Iterable<Result<T>> actualResults,
    String failMsg
)
{
  Iterator<? extends Result> resultsIter = actualResults.iterator();
  Iterator<? extends Result> resultsIter2 = actualResults.iterator();
  Iterator<? extends Result> expectedResultsIter = expectedResults.iterator();
  while (resultsIter.hasNext() && resultsIter2.hasNext() && expectedResultsIter.hasNext()) {
    Object expectedNext = expectedResultsIter.next();
    final Object next = resultsIter.next();
    final Object next2 = resultsIter2.next();
    if (expectedNext instanceof Row) {
      // HACK! Special casing for groupBy
      assertRow(failMsg, (Row) expectedNext, (Row) next);
      assertRow(failMsg, (Row) expectedNext, (Row) next2);
    } else if (expectedNext instanceof Result
               && (((Result) expectedNext).getValue()) instanceof TimeseriesResultValue) {
      // Special case for GroupByTimeseriesQueryRunnerTest to allow a floating point delta to be used
      // in result comparison
      assertTimeseriesResultValue(failMsg, (Result) expectedNext, (Result) next);
      assertTimeseriesResultValue(
          StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg),
          (Result) expectedNext,
          (Result) next2
      );
    } else if (expectedNext instanceof Result
               && (((Result) expectedNext).getValue()) instanceof TopNResultValue) {
      // Special to allow a floating point delta to be used in result comparison due to legacy expected results
      assertTopNResultValue(failMsg, (Result) expectedNext, (Result) next);
      assertTopNResultValue(
          StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg),
          (Result) expectedNext,
          (Result) next2
      );
    } else {
      assertResult(failMsg, (Result) expectedNext, (Result) next);
      assertResult(
          StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg),
          (Result) expectedNext,
          (Result) next2
      );
    }
  }
  if (resultsIter.hasNext()) {
    Assert.fail(
        StringUtils.format("%s: Expected resultsIter to be exhausted, next element was %s", failMsg, resultsIter.next())
    );
  }
  if (resultsIter2.hasNext()) {
    // FIX: report the leftover element from resultsIter2; the original read
    // from the already-exhausted resultsIter, which would throw
    // NoSuchElementException instead of producing the intended message.
    Assert.fail(
        StringUtils.format("%s: Expected resultsIter2 to be exhausted, next element was %s", failMsg, resultsIter2.next())
    );
  }
  if (expectedResultsIter.hasNext()) {
    Assert.fail(
        StringUtils.format(
            "%s: Expected expectedResultsIter to be exhausted, next element was %s",
            failMsg,
            expectedResultsIter.next()
        )
    );
  }
}
/**
 * Compares expected objects against actual objects, walking the actual
 * iterable with two independent iterators to verify that {@code iterator()}
 * can safely be called more than once. {@link Row} elements get fuzzy
 * comparison via {@code assertRow}; everything else uses plain equality.
 * Fails if any iterator has leftover elements.
 *
 * @param expectedResults expected elements, in order
 * @param actualResults   produced elements, in order
 * @param msg             prefix for failure messages; element index appended
 */
private static <T> void assertObjects(Iterable<T> expectedResults, Iterable<T> actualResults, String msg)
{
  Iterator resultsIter = actualResults.iterator();
  Iterator resultsIter2 = actualResults.iterator();
  Iterator expectedResultsIter = expectedResults.iterator();
  int index = 0;
  while (resultsIter.hasNext() && resultsIter2.hasNext() && expectedResultsIter.hasNext()) {
    Object expectedNext = expectedResultsIter.next();
    final Object next = resultsIter.next();
    final Object next2 = resultsIter2.next();
    String failMsg = msg + "-" + index++;
    String failMsg2 = StringUtils.format("%s: Second iterator bad, multiple calls to iterator() should be safe", failMsg);
    if (expectedNext instanceof Row) {
      // HACK! Special casing for groupBy
      assertRow(failMsg, (Row) expectedNext, (Row) next);
      assertRow(failMsg2, (Row) expectedNext, (Row) next2);
    } else {
      Assert.assertEquals(failMsg, expectedNext, next);
      Assert.assertEquals(failMsg2, expectedNext, next2);
    }
  }
  if (resultsIter.hasNext()) {
    Assert.fail(
        StringUtils.format("%s: Expected resultsIter to be exhausted, next element was %s", msg, resultsIter.next())
    );
  }
  if (resultsIter2.hasNext()) {
    // FIX: report the leftover element from resultsIter2; the original read
    // from the already-exhausted resultsIter, which would throw
    // NoSuchElementException instead of producing the intended message.
    Assert.fail(
        StringUtils.format("%s: Expected resultsIter2 to be exhausted, next element was %s", msg, resultsIter2.next())
    );
  }
  if (expectedResultsIter.hasNext()) {
    Assert.fail(
        StringUtils.format(
            "%s: Expected expectedResultsIter to be exhausted, next element was %s",
            msg,
            expectedResultsIter.next()
        )
    );
  }
}
  /**
   * Asserts strict equality between an expected and an actual {@link Result},
   * failing with {@code msg}. Unlike the timeseries/topN variants, no fuzzy
   * numeric comparison is applied here — Result.equals drives the check.
   */
  private static void assertResult(String msg, Result<?> expected, Result actual)
  {
    Assert.assertEquals(msg, expected, actual);
  }
private static void assertTimeseriesResultValue(String msg, Result expected, Result actual)
{
// Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't
// always generate exactly the same results (different merge ordering / float vs double)
Assert.assertEquals(StringUtils.format("%s: timestamp", msg), expected.getTimestamp(), actual.getTimestamp());
TimeseriesResultValue expectedVal = (TimeseriesResultValue) expected.getValue();
TimeseriesResultValue actualVal = (TimeseriesResultValue) actual.getValue();
final Map<String, Object> expectedMap = (Map<String, Object>) expectedVal.getBaseObject();
final Map<String, Object> actualMap = (Map<String, Object>) actualVal.getBaseObject();
assertRow(msg, new MapBasedRow(expected.getTimestamp(), expectedMap), new MapBasedRow(actual.getTimestamp(), actualMap));
}
private static void assertTopNResultValue(String msg, Result expected, Result actual)
{
TopNResultValue expectedVal = (TopNResultValue) expected.getValue();
TopNResultValue actualVal = (TopNResultValue) actual.getValue();
List<Row> listExpectedRows = expectedVal.getValue()
.stream()
.map(dimensionAndMetricValueExtractor -> new MapBasedRow(
expected.getTimestamp(),
dimensionAndMetricValueExtractor.getBaseObject()
))
.collect(Collectors.toList());
List<Row> listActualRows = actualVal.getValue()
.stream()
.map(dimensionAndMetricValueExtractor -> new MapBasedRow(
actual.getTimestamp(),
dimensionAndMetricValueExtractor.getBaseObject()
))
.collect(Collectors.toList());
Assert.assertEquals("Size of list must match", listExpectedRows.size(), listActualRows.size());
IntStream.range(0, listExpectedRows.size()).forEach(value -> assertRow(
StringUtils.format("%s, on value number [%s]", msg, value),
listExpectedRows.get(value),
listActualRows.get(value)
));
}
private static void assertRow(String msg, Row expected, Row actual)
{
// Custom equals check to get fuzzy comparison of numerics, useful because different groupBy strategies don't
// always generate exactly the same results (different merge ordering / float vs double)
Assert.assertEquals(
StringUtils.format("%s: timestamp", msg),
expected.getTimestamp().getMillis(),
actual.getTimestamp().getMillis()
);
final Map<String, Object> expectedMap = ((MapBasedRow) expected).getEvent();
final Map<String, Object> actualMap = ((MapBasedRow) actual).getEvent();
Assert.assertEquals(StringUtils.format("%s: map keys", msg), expectedMap.keySet(), actualMap.keySet());
for (final String key : expectedMap.keySet()) {
final Object expectedValue = expectedMap.get(key);
final Object actualValue = actualMap.get(key);
if (expectedValue instanceof Float || expectedValue instanceof Double) {
Assert.assertEquals(
StringUtils.format("%s: key[%s]", msg, key),
((Number) expectedValue).doubleValue(),
((Number) actualValue).doubleValue(),
Math.abs(((Number) expectedValue).doubleValue() * 1e-6)
);
} else {
Assert.assertEquals(
StringUtils.format("%s: key[%s]", msg, key),
expectedValue,
actualValue
);
}
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.sqoop;
import com.cloudera.sqoop.orm.CompilationManager;
import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
import com.cloudera.sqoop.testutil.CommonArgs;
import com.cloudera.sqoop.testutil.HsqldbTestServer;
import com.cloudera.sqoop.testutil.ImportJobTestCase;
import com.cloudera.sqoop.testutil.SeqFileReader;
import com.cloudera.sqoop.tool.ImportTool;
import com.cloudera.sqoop.util.ClassLoaderStack;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Test that compression options (--compress, --compression-codec) work.
*/
public class TestCompression extends ImportJobTestCase {

  /**
   * Create the argv to pass to Sqoop for a compressed import.
   *
   * Note: the --columns value deliberately keeps a trailing comma after the
   * last column name, matching what Sqoop's option parsing tolerates.
   *
   * @return the argv as an array of strings.
   */
  protected String [] getArgv(boolean includeHadoopFlags, String [] colNames,
      CompressionCodec codec, String fileFormat) {
    StringBuilder columns = new StringBuilder();
    for (String col : colNames) {
      columns.append(col).append(',');
    }

    ArrayList<String> args = new ArrayList<String>();
    if (includeHadoopFlags) {
      CommonArgs.addHadoopFlags(args);
    }

    args.add("--table");
    args.add(HsqldbTestServer.getTableName());
    args.add("--columns");
    args.add(columns.toString());
    args.add("--compress");
    if (codec != null) {
      args.add("--compression-codec");
      args.add(codec.getClass().getName());
    }
    args.add("--warehouse-dir");
    args.add(getWarehouseDir());
    args.add("--connect");
    args.add(HsqldbTestServer.getUrl());
    args.add(fileFormat);
    args.add("--num-mappers");
    args.add("1");

    return args.toArray(new String[0]);
  }

  // this test just uses the two int table.
  protected String getTableName() {
    return HsqldbTestServer.getTableName();
  }

  /**
   * Runs a compressed SequenceFile import and verifies that the file is
   * block-compressed with the expected codec and contains the expected
   * number of records.
   *
   * @param codec       codec to request, or null for the Gzip default
   * @param expectedNum number of records expected in the imported file
   */
  public void runSequenceFileCompressionTest(CompressionCodec codec,
      int expectedNum) throws Exception {
    String [] columns = HsqldbTestServer.getFieldNames();
    ClassLoader savedClassLoader = null;
    SequenceFile.Reader seqReader = null;

    runImport(getArgv(true, columns, codec, "--as-sequencefile"));

    try {
      SqoopOptions opts = new ImportTool().parseArguments(
          getArgv(false, columns, codec, "--as-sequencefile"),
          null, null, true);

      CompilationManager compileMgr = new CompilationManager(opts);
      String jarFile = compileMgr.getJarFilename();
      LOG.debug("Got jar from import job: " + jarFile);

      // Load the generated record class so the SequenceFile values deserialize.
      savedClassLoader = ClassLoaderStack.addJarFile(jarFile, getTableName());

      seqReader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());

      // No codec on the command line means Sqoop defaults to gzip.
      CompressionCodec expectedCodec = (codec != null) ? codec : new GzipCodec();
      assertTrue("Block compressed", seqReader.isBlockCompressed());
      assertEquals(expectedCodec.getClass(),
          seqReader.getCompressionCodec().getClass());

      // here we can actually instantiate (k, v) pairs.
      Configuration conf = new Configuration();
      Object key = ReflectionUtils.newInstance(seqReader.getKeyClass(), conf);
      Object val = ReflectionUtils.newInstance(seqReader.getValueClass(), conf);

      // We know that these values are two ints separated by a ',' character.
      // Since this is all dynamic, though, we don't want to actually link
      // against the class and use its methods, so we simply count the rows
      // to verify that we got all the results from the db into the file.
      int numLines = 0;
      while (seqReader.next(key) != null) {
        seqReader.getCurrentValue(val);
        numLines++;
      }

      assertEquals(expectedNum, numLines);
    } finally {
      IOUtils.closeStream(seqReader);

      if (savedClassLoader != null) {
        ClassLoaderStack.setCurrentClassLoader(savedClassLoader);
      }
    }
  }

  /**
   * Runs a compressed text-file import and verifies the imported file carries
   * the codec's extension and holds the expected number of lines.
   *
   * @param codec       codec to request, or null for the Gzip default
   * @param expectedNum number of lines expected in the imported file
   */
  public void runTextCompressionTest(CompressionCodec codec, int expectedNum)
      throws IOException {
    String [] columns = HsqldbTestServer.getFieldNames();
    runImport(getArgv(true, columns, codec, "--as-textfile"));

    Configuration conf = new Configuration();
    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
    }
    FileSystem fs = FileSystem.get(conf);

    // No codec on the command line means Sqoop defaults to gzip.
    CompressionCodec fileCodec = (codec != null) ? codec : new GzipCodec();
    ReflectionUtils.setConf(fileCodec, getConf());

    Path p = new Path(getDataFilePath().toString()
        + fileCodec.getDefaultExtension());
    InputStream is = fileCodec.createInputStream(fs.open(p));
    BufferedReader r = new BufferedReader(new InputStreamReader(is));

    int numLines = 0;
    while (r.readLine() != null) {
      numLines++;
    }
    r.close();

    assertEquals(expectedNum, numLines);
  }

  @Test
  public void testDefaultTextCompression() throws IOException {
    runTextCompressionTest(null, 4);
  }

  @Test
  public void testBzip2TextCompression() throws IOException {
    runTextCompressionTest(new BZip2Codec(), 4);
  }

  @Test
  public void testBzip2SequenceFileCompression() throws Exception {
    runSequenceFileCompressionTest(new BZip2Codec(), 4);
  }
}
| |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.yahoo.ycsb.db;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.yahoo.ycsb.ByteArrayByteIterator;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.DBException;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.measurements.Measurements;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.PageFilter;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.Vector;
import java.util.concurrent.atomic.AtomicInteger;
import static com.yahoo.ycsb.workloads.CoreWorkload.TABLENAME_PROPERTY;
import static com.yahoo.ycsb.workloads.CoreWorkload.TABLENAME_PROPERTY_DEFAULT;
/**
* HBase 1.0 client for YCSB framework.
*
* A modified version of HBaseClient (which targets HBase v0.9) utilizing the
* HBase 1.0.0 API.
*
* This client also adds toggleable client-side buffering and configurable write
* durability.
*/
public class HBaseClient10 extends com.yahoo.ycsb.DB {
  // Number of client threads still alive; also serves as the lock object
  // guarding the shared Connection (see 'connection' below).
  private static final AtomicInteger THREAD_COUNT = new AtomicInteger(0);

  private Configuration config = HBaseConfiguration.create();

  private boolean debug = false;

  private String tableName = "";

  /**
   * A Cluster Connection instance that is shared by all running ycsb threads.
   * Needs to be initialized late so we pick up command-line configs if any.
   * To ensure one instance only in a multi-threaded context, all access is
   * synchronized on {@link #THREAD_COUNT}.
   */
  private static Connection connection = null;

  // Depending on the value of clientSideBuffering, either bufferedMutator
  // (clientSideBuffering) or currentTable (!clientSideBuffering) will be used.
  private Table currentTable = null;
  private BufferedMutator bufferedMutator = null;

  private String columnFamily = "";
  private byte[] columnFamilyBytes;

  /**
   * Durability to use for puts and deletes.
   */
  private Durability durability = Durability.USE_DEFAULT;

  /** Whether or not a page filter should be used to limit scan length. */
  private boolean usePageFilter = true;

  /**
   * If true, buffer mutations on the client. This is the default behavior for
   * HBaseClient. For measuring insert/update/delete latencies, client side
   * buffering should be disabled.
   */
  private boolean clientSideBuffering = false;
  private long writeBufferSize = 1024 * 1024 * 12;

  /**
   * Initialize any state for this DB. Called once per DB instance; there is one
   * DB instance per client thread.
   */
  @Override
  public void init() throws DBException {
    if ("true"
        .equals(getProperties().getProperty("clientbuffering", "false"))) {
      this.clientSideBuffering = true;
    }
    if (getProperties().containsKey("writebuffersize")) {
      writeBufferSize =
          Long.parseLong(getProperties().getProperty("writebuffersize"));
    }

    if (getProperties().getProperty("durability") != null) {
      this.durability =
          Durability.valueOf(getProperties().getProperty("durability"));
    }

    if ("kerberos".equalsIgnoreCase(config.get("hbase.security.authentication"))) {
      config.set("hadoop.security.authentication", "Kerberos");
      UserGroupInformation.setConfiguration(config);
    }

    if ((getProperties().getProperty("principal")!=null)
        && (getProperties().getProperty("keytab")!=null)) {
      try {
        UserGroupInformation.loginUserFromKeytab(getProperties().getProperty("principal"),
              getProperties().getProperty("keytab"));
      } catch (IOException e) {
        System.err.println("Keytab file is not readable or not found");
        throw new DBException(e);
      }
    }

    String table = getProperties().getProperty(TABLENAME_PROPERTY, TABLENAME_PROPERTY_DEFAULT);
    try {
      THREAD_COUNT.getAndIncrement();
      synchronized (THREAD_COUNT) {
        if (connection == null) {
          // Initialize if not set up already.
          connection = ConnectionFactory.createConnection(config);

          // Terminate right now if table does not exist, since the client
          // will not propagate this error upstream once the workload
          // starts.
          final TableName tName = TableName.valueOf(table);
          // BUG FIX: the probe Table was previously leaked (never closed);
          // try-with-resources releases it once the descriptor check is done.
          try (Table probeTable = connection.getTable(tName)) {
            probeTable.getTableDescriptor();
          }
        }
      }
    } catch (java.io.IOException e) {
      throw new DBException(e);
    }

    if ((getProperties().getProperty("debug") != null)
        && (getProperties().getProperty("debug").compareTo("true") == 0)) {
      debug = true;
    }

    if ("false"
        .equals(getProperties().getProperty("hbase.usepagefilter", "true"))) {
      usePageFilter = false;
    }

    columnFamily = getProperties().getProperty("columnfamily");
    if (columnFamily == null) {
      System.err.println("Error, must specify a columnfamily for HBase table");
      throw new DBException("No columnfamily specified");
    }
    columnFamilyBytes = Bytes.toBytes(columnFamily);
  }

  /**
   * Cleanup any state for this DB. Called once per DB instance; there is one DB
   * instance per client thread.
   */
  @Override
  public void cleanup() throws DBException {
    // Get the measurements instance as this is the only client that should
    // count clean up time like an update if client-side buffering is
    // enabled.
    Measurements measurements = Measurements.getMeasurements();
    try {
      long st = System.nanoTime();
      if (bufferedMutator != null) {
        bufferedMutator.close();
      }
      if (currentTable != null) {
        currentTable.close();
      }
      long en = System.nanoTime();
      final String type = clientSideBuffering ? "UPDATE" : "CLEANUP";
      measurements.measure(type, (int) ((en - st) / 1000));
      int threadCount = THREAD_COUNT.decrementAndGet();
      if (threadCount <= 0) {
        // Means we are done so ok to shut down the Connection.
        synchronized (THREAD_COUNT) {
          if (connection != null) {
            connection.close();
            connection = null;
          }
        }
      }
    } catch (IOException e) {
      throw new DBException(e);
    }
  }

  /**
   * Points this instance at the given HBase table, creating either a plain
   * Table handle or, when client-side buffering is enabled, a BufferedMutator.
   */
  public void getHTable(String table) throws IOException {
    final TableName tName = TableName.valueOf(table);
    this.currentTable = connection.getTable(tName);
    if (clientSideBuffering) {
      final BufferedMutatorParams p = new BufferedMutatorParams(tName);
      p.writeBufferSize(writeBufferSize);
      this.bufferedMutator = connection.getBufferedMutator(p);
    }
  }

  /**
   * Read a record from the database. Each field/value pair from the result will
   * be stored in a HashMap.
   *
   * @param table
   *          The name of the table
   * @param key
   *          The record key of the record to read.
   * @param fields
   *          The list of fields to read, or null for all of them
   * @param result
   *          A HashMap of field/value pairs for the result
   * @return Zero on success, a non-zero error code on error
   */
  @Override
  public Status read(String table, String key, Set<String> fields,
      Map<String, ByteIterator> result) {
    // if this is a "new" table, init HTable object. Else, use existing one
    if (!tableName.equals(table)) {
      currentTable = null;
      try {
        getHTable(table);
        tableName = table;
      } catch (IOException e) {
        System.err.println("Error accessing HBase table: " + e);
        return Status.ERROR;
      }
    }

    Result r = null;
    try {
      if (debug) {
        System.out
            .println("Doing read from HBase columnfamily " + columnFamily);
        System.out.println("Doing read for key: " + key);
      }
      Get g = new Get(Bytes.toBytes(key));
      if (fields == null) {
        g.addFamily(columnFamilyBytes);
      } else {
        for (String field : fields) {
          g.addColumn(columnFamilyBytes, Bytes.toBytes(field));
        }
      }
      r = currentTable.get(g);
    } catch (IOException e) {
      if (debug) {
        System.err.println("Error doing get: " + e);
      }
      return Status.ERROR;
    } catch (ConcurrentModificationException e) {
      // do nothing for now...need to understand HBase concurrency model better
      return Status.ERROR;
    }

    if (r.isEmpty()) {
      return Status.NOT_FOUND;
    }

    while (r.advance()) {
      final Cell c = r.current();
      result.put(Bytes.toString(CellUtil.cloneQualifier(c)),
          new ByteArrayByteIterator(CellUtil.cloneValue(c)));
      if (debug) {
        System.out.println(
            "Result for field: " + Bytes.toString(CellUtil.cloneQualifier(c))
                + " is: " + Bytes.toString(CellUtil.cloneValue(c)));
      }
    }
    return Status.OK;
  }

  /**
   * Perform a range scan for a set of records in the database. Each field/value
   * pair from the result will be stored in a HashMap.
   *
   * @param table
   *          The name of the table
   * @param startkey
   *          The record key of the first record to read.
   * @param recordcount
   *          The number of records to read
   * @param fields
   *          The list of fields to read, or null for all of them
   * @param result
   *          A Vector of HashMaps, where each HashMap is a set field/value
   *          pairs for one record
   * @return Zero on success, a non-zero error code on error
   */
  @Override
  public Status scan(String table, String startkey, int recordcount,
      Set<String> fields, Vector<HashMap<String, ByteIterator>> result) {
    // if this is a "new" table, init HTable object. Else, use existing one
    if (!tableName.equals(table)) {
      currentTable = null;
      try {
        getHTable(table);
        tableName = table;
      } catch (IOException e) {
        System.err.println("Error accessing HBase table: " + e);
        return Status.ERROR;
      }
    }

    Scan s = new Scan(Bytes.toBytes(startkey));
    // HBase has no record limit. Here, assume recordcount is small enough to
    // bring back in one call.
    // We get back recordcount records
    s.setCaching(recordcount);
    if (this.usePageFilter) {
      s.setFilter(new PageFilter(recordcount));
    }

    // add specified fields or else all fields
    if (fields == null) {
      s.addFamily(columnFamilyBytes);
    } else {
      for (String field : fields) {
        s.addColumn(columnFamilyBytes, Bytes.toBytes(field));
      }
    }

    // get results
    ResultScanner scanner = null;
    try {
      scanner = currentTable.getScanner(s);
      int numResults = 0;
      for (Result rr = scanner.next(); rr != null; rr = scanner.next()) {
        // get row key
        String key = Bytes.toString(rr.getRow());

        if (debug) {
          System.out.println("Got scan result for key: " + key);
        }

        HashMap<String, ByteIterator> rowResult =
            new HashMap<String, ByteIterator>();

        while (rr.advance()) {
          final Cell cell = rr.current();
          rowResult.put(Bytes.toString(CellUtil.cloneQualifier(cell)),
              new ByteArrayByteIterator(CellUtil.cloneValue(cell)));
        }

        // add rowResult to result vector
        result.add(rowResult);
        numResults++;

        // PageFilter does not guarantee that the number of results is <=
        // pageSize, so this
        // break is required.
        if (numResults >= recordcount) {// if hit recordcount, bail out
          break;
        }
      } // done with row
    } catch (IOException e) {
      if (debug) {
        System.out.println("Error in getting/parsing scan result: " + e);
      }
      return Status.ERROR;
    } finally {
      if (scanner != null) {
        scanner.close();
      }
    }

    return Status.OK;
  }

  /**
   * Update a record in the database. Any field/value pairs in the specified
   * values HashMap will be written into the record with the specified record
   * key, overwriting any existing values with the same field name.
   *
   * @param table
   *          The name of the table
   * @param key
   *          The record key of the record to write
   * @param values
   *          A HashMap of field/value pairs to update in the record
   * @return Zero on success, a non-zero error code on error
   */
  @Override
  public Status update(String table, String key,
      Map<String, ByteIterator> values) {
    // if this is a "new" table, init HTable object. Else, use existing one
    if (!tableName.equals(table)) {
      currentTable = null;
      try {
        getHTable(table);
        tableName = table;
      } catch (IOException e) {
        System.err.println("Error accessing HBase table: " + e);
        return Status.ERROR;
      }
    }

    if (debug) {
      System.out.println("Setting up put for key: " + key);
    }
    Put p = new Put(Bytes.toBytes(key));
    p.setDurability(durability);
    for (Map.Entry<String, ByteIterator> entry : values.entrySet()) {
      byte[] value = entry.getValue().toArray();
      if (debug) {
        System.out.println("Adding field/value " + entry.getKey() + "/"
            + Bytes.toStringBinary(value) + " to put request");
      }
      p.addColumn(columnFamilyBytes, Bytes.toBytes(entry.getKey()), value);
    }

    try {
      if (clientSideBuffering) {
        Preconditions.checkNotNull(bufferedMutator);
        bufferedMutator.mutate(p);
      } else {
        currentTable.put(p);
      }
    } catch (IOException e) {
      if (debug) {
        System.err.println("Error doing put: " + e);
      }
      return Status.ERROR;
    } catch (ConcurrentModificationException e) {
      // do nothing for now...hope this is rare
      return Status.ERROR;
    }

    return Status.OK;
  }

  /**
   * Insert a record in the database. Any field/value pairs in the specified
   * values HashMap will be written into the record with the specified record
   * key.
   *
   * @param table
   *          The name of the table
   * @param key
   *          The record key of the record to insert.
   * @param values
   *          A HashMap of field/value pairs to insert in the record
   * @return Zero on success, a non-zero error code on error
   */
  @Override
  public Status insert(String table, String key,
      Map<String, ByteIterator> values) {
    return update(table, key, values);
  }

  /**
   * Delete a record from the database.
   *
   * @param table
   *          The name of the table
   * @param key
   *          The record key of the record to delete.
   * @return Zero on success, a non-zero error code on error
   */
  @Override
  public Status delete(String table, String key) {
    // if this is a "new" table, init HTable object. Else, use existing one
    if (!tableName.equals(table)) {
      currentTable = null;
      try {
        getHTable(table);
        tableName = table;
      } catch (IOException e) {
        System.err.println("Error accessing HBase table: " + e);
        return Status.ERROR;
      }
    }

    if (debug) {
      System.out.println("Doing delete for key: " + key);
    }

    final Delete d = new Delete(Bytes.toBytes(key));
    d.setDurability(durability);
    try {
      if (clientSideBuffering) {
        Preconditions.checkNotNull(bufferedMutator);
        bufferedMutator.mutate(d);
      } else {
        currentTable.delete(d);
      }
    } catch (IOException e) {
      if (debug) {
        System.err.println("Error doing delete: " + e);
      }
      return Status.ERROR;
    }

    return Status.OK;
  }

  @VisibleForTesting
  void setConfiguration(final Configuration newConfig) {
    this.config = newConfig;
  }
}
/*
* For customized vim control set autoindent set si set shiftwidth=4
*/
| |
/*
* Copyright 2015 BISEL, Heriot-Watt University, Edinburgh, UK (http://www.bisel.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.hw.macs.bisel.phis.iqs.v1;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.URLEncoder;
import java.util.Enumeration;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import uk.ac.hw.macs.bisel.phis.iqs.CommunicateWithSolr;
import uk.ac.hw.macs.bisel.phis.iqs.GetHost;
/**
*
* @author kcm
*/
@WebServlet(name = "v100AS", urlPatterns = {"/v100AS"})
public class v100AS extends HttpServlet {

    /** Stem of every SOLR query; the host is resolved once at class-load time. */
    private static final String url = GetHost.getEBI("100") + "getAutosuggest?"; // stem of every SOLR query
    private static final Logger logger = Logger.getLogger(System.class.getName());

    /**
     * Appends {@code name=value} to the query being built, URL-encoding the
     * value and inserting the '&amp;' separator unless this is the first
     * parameter.
     *
     * @param query the SOLR query URL under construction
     * @param first true if no parameter has been appended yet
     * @param name  the SOLR parameter name (already validated by the caller)
     * @param value the raw value from the client; UTF-8 URL-encoded here
     * @throws IOException if UTF-8 is unsupported (never on a compliant JVM)
     */
    private static void appendParam(StringBuilder query, boolean first, String name, String value)
            throws IOException {
        if (!first) {
            query.append('&');
        }
        query.append(name).append('=').append(URLEncoder.encode(value, "UTF-8"));
    }

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
     * methods: translates the recognised request parameters into a SOLR
     * autosuggest query, runs it, and writes the JSON result (or a JSON error
     * document for an invalid parameter) to the response.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("application/json;charset=UTF-8");
        response.setHeader("Access-Control-Allow-Origin", "*");

        boolean error = false; // has an error been detected?
        String solrResult = ""; // JSON doc sent back to UI

        // create URL for SOLR query
        StringBuilder queryURL = new StringBuilder(url);
        boolean first = true; // no separator needed before the first parameter

        Map<String, String[]> params = request.getParameterMap(); // parameters and their values
        Enumeration<String> allParams = request.getParameterNames(); // list of parameter names
        while (allParams.hasMoreElements()) {
            String param = allParams.nextElement();

            if (param.equalsIgnoreCase("version")) {
                // accepted for backwards compatibility; has no effect on the query
                continue;
            }

            if (param.equalsIgnoreCase("asType")) {
                // BUG FIX: value is fetched with the client's actual (case-insensitive)
                // parameter name; previously params.get("asType") NPE'd on e.g. "asTYPE".
                String asType = params.get(param)[0];
                if (asType.equals("GENE") || asType.equals("ANATOMY") || asType.equals("PHENOTYPE")) {
                    appendParam(queryURL, first, "autosuggestType", asType);
                    first = false;
                } else {
                    error = true;
                    solrResult = "{\"invalid_paramater_value\": \"" + asType + "\"}";
                }
            } else if (param.equals("num")) {
                // number of results to ask for... lots of results is very costly
                String num = params.get("num")[0];
                try {
                    Integer.parseInt(num); // validate only; SOLR receives the raw value
                    appendParam(queryURL, first, "resultNo", num);
                    first = false;
                } catch (NumberFormatException nfe) {
                    error = true;
                    solrResult = "{\"invalid_num_specified\": \"" + num + "\"}";
                }
            } else {
                switch (param) {
                    // parameters passed through to SOLR under the same name
                    case "term":                  // search term
                    case "type":                  // 2015-03-17
                    case "stage":
                    case "imagingMethod":
                    case "taxon":
                    case "sampleType":
                    case "imageGeneratedBy":
                    case "mutantGene":            // @deprecated, kept for old clients
                    case "expressedGeneOrAllele": // @deprecated, kept for old clients
                    case "phenotype":             // @deprecated, kept for old clients
                        appendParam(queryURL, first, param, params.get(param)[0]);
                        first = false;
                        break;
                    default: // parameter was not recognised, send error
                        error = true;
                        logger.log(Level.WARNING, "Client sent invalid parameter: {0}", param);
                        solrResult = "{\"invalid_paramater\": \"" + param + "\"}";
                        break;
                }
            }

            if (error) {
                break; // stop processing parameters on the first error
            }
        }

        // run query against SOLR API
        if (!error) { // if no error detected
            CommunicateWithSolr cws = new CommunicateWithSolr();
            solrResult = cws.talk(queryURL.toString());
        } else {
            logger.log(Level.SEVERE, "[BAD QUERY] {0}", queryURL);
        }

        try ( // send result to client (UI)
                PrintWriter out = response.getWriter()) {
            out.println(solrResult); // may be error or genuine result
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
| |
package com.warsawcitygame.Utils;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.ColorDrawable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.squareup.okhttp.ResponseBody;
import com.warsawcitygame.Activities.LoginActivity;
import com.warsawcitygame.Adapters.FriendListViewAdapter;
import com.warsawcitygame.Fragments.CurrentMissionFragment;
import com.warsawcitygame.R;
import com.warsawcitygames.models.UserMissionModel;
import com.warsawcitygames.models.friends_models.FriendModel;
import com.warsawcitygamescommunication.Services.FriendshipsService;
import java.io.IOError;
import java.util.InputMismatchException;
import java.util.LinkedList;
import java.util.List;
import retrofit.Call;
import retrofit.Response;
import retrofit.Retrofit;
/**
 * Static helpers for building and showing the application's dialogs
 * (text editing, password change, profile, loading, error, confirmation,
 * achievement popup) plus a short-toast convenience.
 * <p>
 * All methods must be called on the UI thread, as they inflate views and
 * show dialogs.
 */
public class DialogUtils
{
    /**
     * Shows a dialog with a single editable text field; on save the edited value
     * is written back into {@code oldText}. Convenience overload without a save
     * callback.
     */
    public static void RaiseDialogEditTextView(Context context, Activity activity, String textToEdit, String description, final TextView oldText)
    {
        RaiseDialogEditTextView(context, activity, textToEdit, description, oldText, null);
    }
    /**
     * Shows a dialog with a single editable text field.
     *
     * @param textToEdit   initial content of the edit field
     * @param description  label shown above the field
     * @param oldText      view updated with the new value when the user confirms
     * @param onSaveAction optional callback invoked after the value is saved (may be null)
     */
    public static void RaiseDialogEditTextView(Context context, Activity activity, String textToEdit, String description, final TextView oldText, final DelegateAction onSaveAction)
    {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        LayoutInflater inflater = activity.getLayoutInflater();
        View dialogView = inflater.inflate(R.layout.dialog_edit_text, null);
        builder.setView(dialogView);
        final EditText editField = (EditText)dialogView.findViewById(R.id.editable_text);
        editField.setText(textToEdit);
        final TextView descriptionTxt = (TextView)dialogView.findViewById(R.id.description);
        descriptionTxt.setText(description);
        final Button negativeButton = (Button)dialogView.findViewById(R.id.negativeButton);
        final Dialog dialog = builder.create();
        negativeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        final Button positiveButton = (Button)dialogView.findViewById(R.id.positiveButton);
        positiveButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                oldText.setText(editField.getText().toString());
                dialog.dismiss();
                if(onSaveAction!=null)
                    onSaveAction.ExecuteAction();
            }
        });
        dialog.show();
    }
    /**
     * Shows a two-field password change dialog. On confirm the two field values
     * (presumably old/new password — TODO confirm against the layout) are passed
     * to {@code onSaveAction} as a String array of length 2.
     */
    public static void RaiseChangePasswordDialog(Context context, Activity activity, String[] descriptions, final TextView oldText, final DelegateActionParams<String> onSaveAction){
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        LayoutInflater inflater = activity.getLayoutInflater();
        View dialogView = inflater.inflate(R.layout.dialog_password_change, null);
        builder.setView(dialogView);
        final EditText editField1 = (EditText)dialogView.findViewById(R.id.editable_text1);
        final TextView descriptionTxt1 = (TextView)dialogView.findViewById(R.id.description1);
        final EditText editField2 = (EditText)dialogView.findViewById(R.id.editable_text2);
        final TextView descriptionTxt2 = (TextView)dialogView.findViewById(R.id.description2);
        final Button negativeButton = (Button)dialogView.findViewById(R.id.negativeButton);
        final Dialog dialog = builder.create();
        negativeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        final Button positiveButton = (Button)dialogView.findViewById(R.id.positiveButton);
        positiveButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
                if(onSaveAction!=null)
                    onSaveAction.ExecuteAction(new String[]{editField1.getText().toString(), editField2.getText().toString()});
            }
        });
        dialog.show();
    }
    /**
     * Shows a friend's profile dialog with an add/remove friend action button.
     * The button's action depends on {@code modelObj.ActionType} ("add"/"remove");
     * any other value hides the button. On success the local {@code friends} and
     * {@code searchResults} lists are updated and {@code adapter} is refreshed.
     */
    public static void RaiseDialogShowProfile(final Context context, Bitmap bitmap, final FriendModel modelObj, final FriendshipsService service, final List<FriendModel> friends, final List<FriendModel> searchResults, final FriendListViewAdapter adapter)
    {
        final Dialog dialog = new Dialog(context, R.style.TransparentStretchedDialog);
        dialog.setContentView(R.layout.dialog_profile);
        final TextView descriptionTxt = (TextView)dialog.findViewById(R.id.level);
        descriptionTxt.setText(modelObj.Username);
        final TextView usernameTxt = (TextView)dialog.findViewById(R.id.userNameTop);
        usernameTxt.setText(modelObj.Name);
        final ImageView imageView = (ImageView)dialog.findViewById(R.id.profilePic);
        if(bitmap!=null)
            imageView.setImageBitmap(bitmap);
        final Button negativeButton = (Button)dialog.findViewById(R.id.btn_dialog);
        negativeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        final Button friendActionButton = (Button)dialog.findViewById(R.id.btn_profile_action);
        if(modelObj.ActionType.equals("add"))
            friendActionButton.setText("Add friend");
        else if(modelObj.ActionType.equals("remove"))
            friendActionButton.setText("Remove friend");
        else
            friendActionButton.setVisibility(View.INVISIBLE);
        friendActionButton.setOnClickListener(new View.OnClickListener()
        {
            @Override
            public void onClick(View v)
            {
                dialog.dismiss();
                final Dialog loading = DialogUtils.RaiseDialogLoading(context, true);
                Call<ResponseBody> call = null;
                if(modelObj.ActionType.equals("add"))
                    call = service.AssignFriend(modelObj.Id);
                else if(modelObj.ActionType.equals("remove"))
                    call = service.RemoveFriend(modelObj.Id);
                if (call != null)
                {
                    // NOTE(review): CustomCallback appears to surface the result through
                    // both onSuccess and onResponse; if both fire for one call the lists
                    // are mutated twice (duplicate add) — confirm CustomCallback's contract.
                    call.enqueue(new CustomCallback<ResponseBody>(context)
                    {
                        @Override
                        public void onSuccess(ResponseBody model)
                        {
                            applyFriendListChange(modelObj, friends, searchResults);
                            adapter.notifyDataSetChanged();
                            DialogUtils.RaiseDialogShowError(context, "Success", "Successfully completed action.");
                            loading.dismiss();
                        }
                        @Override
                        public void onResponse(Response<ResponseBody> response, Retrofit retrofit)
                        {
                            if (!response.isSuccess())
                                DialogUtils.RaiseDialogShowError(context, "Error", "Something went wrong");
                            else
                            {
                                applyFriendListChange(modelObj, friends, searchResults);
                                adapter.notifyDataSetChanged();
                                DialogUtils.RaiseDialogShowError(context, "Success", "Successfully completed action.");
                            }
                            loading.dismiss();
                        }
                        @Override
                        public void onFailure(Throwable t)
                        {
                            loading.dismiss();
                            DialogUtils.RaiseDialogShowError(context, "Error", "Something went wrong");
                        }
                    });
                }
            }
        });
        dialog.show();
    }
    /**
     * Applies a completed add/remove friend action to the local lists.
     * Extracted helper: this logic was duplicated verbatim in the profile dialog's
     * onSuccess and onResponse callbacks.
     */
    private static void applyFriendListChange(FriendModel modelObj, List<FriendModel> friends, List<FriendModel> searchResults)
    {
        if(modelObj.ActionType.equals("add"))
        {
            friends.add(modelObj);
            searchResults.remove(modelObj);
        }
        else
        {
            friends.remove(modelObj);
            searchResults.remove(modelObj);
        }
    }
    /**
     * Creates and immediately shows a loading dialog.
     */
    public static Dialog RaiseDialogLoading(Context context)
    {
        return RaiseDialogLoading(context, true);
    }
    /**
     * Creates a loading dialog.
     *
     * @param show whether to show the dialog immediately
     * @return the dialog, so the caller can dismiss it later
     */
    public static Dialog RaiseDialogLoading(Context context, boolean show)
    {
        final Dialog dialog = new Dialog(context, R.style.TransparentStretchedDialog);
        dialog.setContentView(R.layout.dialog_loading);
        if(show)dialog.show();
        return dialog;
    }
    /**
     * Shows a message dialog with a title and an OK button.
     * Despite the name, it is also used for success notifications.
     */
    public static Dialog RaiseDialogShowError(Context context, String title, String text)
    {
        final Dialog dialog = new Dialog(context, R.style.TransparentStretchedDialog);
        dialog.setContentView(R.layout.dialog_error);
        ((TextView)dialog.findViewById(R.id.error_title)).setText(title);
        ((TextView)dialog.findViewById(R.id.error_msg)).setText(text);
        Button ok = ((Button)dialog.findViewById(R.id.error_dismiss));
        ok.setText("OK");
        ok.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        dialog.show();
        return dialog;
    }
    /**
     * Shows a yes/no confirmation dialog; {@code action} runs only when the
     * positive button is pressed.
     */
    public static Dialog RaiseDialogAbortMissionConfirmation(Context context, final Activity activity, final DelegateAction action)
    {
        AlertDialog.Builder builder = new AlertDialog.Builder(context);
        LayoutInflater inflater = activity.getLayoutInflater();
        View dialogView = inflater.inflate(R.layout.dialog_confirmation, null);
        builder.setView(dialogView);
        final Button negativeButton = (Button)dialogView.findViewById(R.id.negativeButton);
        final Dialog dialog = builder.create();
        negativeButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        final Button positiveButton = (Button)dialogView.findViewById(R.id.positiveButton);
        positiveButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if(action != null) action.ExecuteAction();
                dialog.dismiss();
            }
        });
        dialog.show();
        return dialog;
    }
    /**
     * Shows a non-cancelable popup with the given message and a dismiss button.
     */
    public static void RaiseAchievementDialog(String msg, Context context)
    {
        final Dialog dialog = new Dialog(context);
        dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
        dialog.setCancelable(false);
        dialog.setContentView(R.layout.dialog_popup_information);
        TextView text = (TextView) dialog.findViewById(R.id.text_dialog);
        text.setText(msg);
        Button dialogButton = (Button) dialog.findViewById(R.id.btn_dialog);
        dialogButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.dismiss();
            }
        });
        dialog.show();
    }
    /**
     * Shows a short-duration toast with the given text.
     */
    public static void showShortToast(String txt, Context context)
    {
        int duration = Toast.LENGTH_SHORT;
        Toast toast = Toast.makeText(context, txt, duration);
        toast.show();
    }
}
| |
package com.shareyourproxy.app.fragment;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.Drawable;
import android.os.Bundle;
import android.support.v7.widget.DefaultItemAnimator;
import android.support.v7.widget.LinearLayoutManager;
import android.text.SpannableStringBuilder;
import android.text.Spanned;
import android.text.style.TextAppearanceSpan;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.shareyourproxy.IntentLauncher;
import com.shareyourproxy.R;
import com.shareyourproxy.api.domain.model.User;
import com.shareyourproxy.api.rx.JustObserver;
import com.shareyourproxy.api.rx.RxActivityFeedSync;
import com.shareyourproxy.api.rx.RxHelper;
import com.shareyourproxy.api.rx.command.eventcallback.ActivityFeedDownloadedEvent;
import com.shareyourproxy.app.adapter.ActivityFeedAdapter;
import com.shareyourproxy.app.adapter.BaseRecyclerView;
import com.shareyourproxy.app.adapter.BaseViewHolder.ItemClickListener;
import com.shareyourproxy.app.dialog.ErrorDialog;
import com.shareyourproxy.widget.ContentDescriptionDrawable;
import com.twitter.sdk.android.Twitter;
import com.twitter.sdk.android.core.Callback;
import com.twitter.sdk.android.core.Result;
import com.twitter.sdk.android.core.TwitterException;
import com.twitter.sdk.android.core.TwitterSession;
import com.twitter.sdk.android.core.identity.TwitterLoginButton;
import butterknife.Bind;
import butterknife.BindDimen;
import butterknife.BindString;
import butterknife.ButterKnife;
import retrofit.Response;
import retrofit.Retrofit;
import rx.subscriptions.CompositeSubscription;
import timber.log.Timber;
import static com.shareyourproxy.Constants.ARG_USER_SELECTED_PROFILE;
import static com.shareyourproxy.util.ViewUtils.svgToBitmapDrawable;
/**
 * Fragment showing a user's activity feed, built from the user's channels via a
 * Twitter session. Handles Twitter login for feed items that require auth.
 * <p>
 * Created by Evan on 10/10/15.
 */
public class UserFeedFragment extends BaseFragment implements ItemClickListener {
    @Bind(R.id.fragment_user_feed_recyclerview)
    BaseRecyclerView recyclerView;
    @Bind(R.id.fragment_user_feed_empty_textview)
    TextView emptyTextView;
    @BindString(R.string.fragment_userfeed_empty_title)
    String loggedInNullTitle;
    @BindString(R.string.fragment_userfeed_empty_message)
    String stringNullMessage;
    @BindString(R.string.fragment_userprofile_contact_empty_title)
    String contactNullTitle;
    @BindDimen(R.dimen.common_svg_null_screen_small)
    int marginNullScreen;
    @BindString(R.string.twitter_login_error)
    String twitterLoginError;
    @BindString(R.string.twitter_login_error_message)
    String twitterLoginErrorMessage;
    // Whether the displayed profile belongs to the logged-in user.
    private boolean _isLoggedInUser;
    private User _userContact;
    private CompositeSubscription _subscriptions;
    private ActivityFeedAdapter _adapter;
    // Hidden login button used to trigger the Twitter auth flow programmatically.
    private TwitterLoginButton twitterLoginButton;
    private int _lastClickedAuthItem;
    private RxHelper _rxHelper = RxHelper.INSTANCE;
    /**
     * Constructor.
     */
    public UserFeedFragment() {
    }
    /**
     * Create a new user activity feed fragment.
     *
     * @return user activity feed fragment.
     */
    public static UserFeedFragment newInstance() {
        return new UserFeedFragment();
    }
    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        _userContact = getActivity().getIntent().getExtras().getParcelable
            (ARG_USER_SELECTED_PROFILE);
        _isLoggedInUser = isLoggedInUser(_userContact);
    }
    @Override
    public View onCreateView(
        LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View rootView = inflater.inflate(R.layout.fragment_user_feed, container, false);
        ButterKnife.bind(this, rootView);
        initialize();
        return rootView;
    }
    /**
     * Fetch the activity feed for the displayed user's channels and subscribe
     * for the downloaded result.
     *
     * @param session active Twitter session used to query the feed
     */
    public void getUserFeed(TwitterSession session) {
        _subscriptions = _rxHelper.checkCompositeButton(_subscriptions);
        _subscriptions.add(RxActivityFeedSync.INSTANCE
            .getChannelFeed(getActivity(), session, _userContact.channels())
            .subscribe(ActivityFeedObserver()));
    }
    /**
     * @return observer that forwards downloaded feed events to the adapter.
     */
    public JustObserver<ActivityFeedDownloadedEvent> ActivityFeedObserver() {
        return new JustObserver<ActivityFeedDownloadedEvent>() {
            @Override
            public void next(ActivityFeedDownloadedEvent event) {
                activityFeedDownloaded(event);
            }
        };
    }
    /**
     * Initialize a twitter login button with a callback to handle errors.
     */
    private void initializeTwitterLogin() {
        twitterLoginButton = new TwitterLoginButton(getActivity());
        twitterLoginButton.setVisibility(View.GONE);
        twitterLoginButton.setCallback(new Callback<TwitterSession>() {
            @Override
            public void onResponse(Response<TwitterSession> response, Retrofit retrofit) {
            }
            @Override
            public void onFailure(Throwable t) {
            }
            @Override
            public void success(Result<TwitterSession> result) {
                // Persist the new session, drop the auth prompt item and reload the feed.
                Twitter.getSessionManager().setActiveSession(result.data);
                _adapter.removeItem(_lastClickedAuthItem);
                getUserFeed(result.data);
            }
            @Override
            public void failure(TwitterException exception) {
                Timber.e(Log.getStackTraceString(exception));
                ErrorDialog.newInstance(twitterLoginError,
                    twitterLoginErrorMessage).show(getActivity().getSupportFragmentManager());
            }
        });
    }
    private void activityFeedDownloaded(ActivityFeedDownloadedEvent event) {
        _adapter.refreshFeedData(event.feedItems);
    }
    @Override
    public void onResume() {
        super.onResume();
        _subscriptions = _rxHelper.checkCompositeButton(_subscriptions);
    }
    @Override
    public void onPause() {
        super.onPause();
        _subscriptions.unsubscribe();
        _subscriptions = null;
    }
    /**
     * Initialize this fragments views.
     */
    private void initialize() {
        initializeTwitterLogin();
        initializeRecyclerView();
        // NOTE(review): getActiveSession() may return null when the user never
        // logged in — confirm RxActivityFeedSync tolerates a null session.
        TwitterSession session = Twitter.getSessionManager().getActiveSession();
        getUserFeed(session);
    }
    /**
     * Initialize a recyclerView with User data.
     */
    private void initializeRecyclerView() {
        initializeEmptyView();
        recyclerView.setLayoutManager(new LinearLayoutManager(getActivity()));
        _adapter = ActivityFeedAdapter.newInstance(recyclerView, _userContact, this);
        recyclerView.setAdapter(_adapter);
        recyclerView.setHasFixedSize(true);
        recyclerView.setItemAnimator(new DefaultItemAnimator());
    }
    /**
     * Build the empty-state text (title styled Body2, message styled Body) for
     * either the logged-in user or a contact, and attach it to the recyclerView.
     */
    private void initializeEmptyView() {
        Context context = getContext();
        if (_isLoggedInUser) {
            SpannableStringBuilder sb = new SpannableStringBuilder(loggedInNullTitle).append("\n")
                .append(stringNullMessage);
            sb.setSpan(new TextAppearanceSpan(context, R.style.Proxy_TextAppearance_Body2),
                0, loggedInNullTitle.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
            sb.setSpan(new TextAppearanceSpan(context, R.style.Proxy_TextAppearance_Body),
                loggedInNullTitle.length() + 1, sb.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
            emptyTextView.setText(sb);
            emptyTextView.setCompoundDrawablesWithIntrinsicBounds(
                null, getNullDrawable(R.raw.ic_ghost_doge), null, null);
        } else {
            String contactNullMessage = getString(
                R.string.fragment_userprofile_contact_empty_message, _userContact.first());
            SpannableStringBuilder sb = new SpannableStringBuilder(contactNullTitle).append("\n")
                .append(contactNullMessage);
            // BUGFIX: spans previously used loggedInNullTitle.length() although this
            // builder starts with contactNullTitle, mis-styling the contact empty view.
            sb.setSpan(new TextAppearanceSpan(context, R.style.Proxy_TextAppearance_Body2),
                0, contactNullTitle.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
            sb.setSpan(new TextAppearanceSpan(context, R.style.Proxy_TextAppearance_Body),
                contactNullTitle.length() + 1, sb.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
            emptyTextView.setText(sb);
            emptyTextView.setCompoundDrawablesWithIntrinsicBounds(
                null, getNullDrawable(R.raw.ic_ghost_sloth), null, null);
        }
        recyclerView.setEmptyView(emptyTextView);
    }
    /**
     * Parse a svg and return a null screen sized {@link ContentDescriptionDrawable} .
     *
     * @return Drawable with a contentDescription
     */
    private Drawable getNullDrawable(int resId) {
        return svgToBitmapDrawable(getActivity(), resId, marginNullScreen);
    }
    @Override
    public void onItemClick(View view, int position) {
        switch (_adapter.getItemViewType(position)) {
            case ActivityFeedAdapter.VIEWTYPE_HEADER:
                _lastClickedAuthItem = position;
                switch (_adapter.getItemData(position).channelType()) {
                    case Twitter:
                        twitterLoginButton.performClick();
                        break;
                }
                break;
            case ActivityFeedAdapter.VIEWTYPE_CONTENT:
                String url = _adapter.getItemData(position).actionAddress();
                IntentLauncher.launchWebIntent(getActivity(), url);
                break;
        }
    }
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        twitterLoginButton.onActivityResult(requestCode, resultCode, data);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.rpc.protocol.thrift;
import com.alibaba.dubbo.common.Constants;
import com.alibaba.dubbo.common.URL;
import com.alibaba.dubbo.common.extension.ExtensionLoader;
import com.alibaba.dubbo.common.utils.ConfigUtils;
import com.alibaba.dubbo.remoting.Channel;
import com.alibaba.dubbo.remoting.RemotingException;
import com.alibaba.dubbo.remoting.Transporter;
import com.alibaba.dubbo.remoting.exchange.ExchangeChannel;
import com.alibaba.dubbo.remoting.exchange.ExchangeClient;
import com.alibaba.dubbo.remoting.exchange.ExchangeHandler;
import com.alibaba.dubbo.remoting.exchange.ExchangeServer;
import com.alibaba.dubbo.remoting.exchange.Exchangers;
import com.alibaba.dubbo.remoting.exchange.support.ExchangeHandlerAdapter;
import com.alibaba.dubbo.rpc.Exporter;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import com.alibaba.dubbo.rpc.protocol.AbstractProtocol;
import com.alibaba.dubbo.rpc.protocol.dubbo.DubboExporter;
import java.util.ArrayList;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
 * RPC protocol implementation that exchanges messages with the thrift codec
 * over the dubbo exchange layer.
 */
public class ThriftProtocol extends AbstractProtocol {
    public static final int DEFAULT_PORT = 40880;
    public static final String NAME = "thrift";
    /** Bound servers, keyed by "ip:port". */
    private final ConcurrentMap<String, ExchangeServer> serverMap =
            new ConcurrentHashMap<String, ExchangeServer>();
    /** Dispatches incoming invocations to the locally exported service. */
    private ExchangeHandler handler = new ExchangeHandlerAdapter() {
        @Override
        public Object reply(ExchangeChannel channel, Object msg) throws RemotingException {
            if (msg instanceof Invocation) {
                Invocation inv = (Invocation) msg;
                String serviceName = inv.getAttachments().get(Constants.INTERFACE_KEY);
                String serviceKey = serviceKey(channel.getLocalAddress().getPort(),
                        serviceName, null, null);
                DubboExporter<?> exporter = (DubboExporter<?>) exporterMap.get(serviceKey);
                if (exporter == null) {
                    throw new RemotingException(channel,
                            "Not found exported service: "
                                    + serviceKey
                                    + " in "
                                    + exporterMap.keySet()
                                    + ", may be version or group mismatch "
                                    + ", channel: consumer: "
                                    + channel.getRemoteAddress()
                                    + " --> provider: "
                                    + channel.getLocalAddress()
                                    + ", message:" + msg);
                }
                // Record the caller's address so the service implementation can read it.
                RpcContext.getContext().setRemoteAddress(channel.getRemoteAddress());
                return exporter.getInvoker().invoke(inv);
            }
            throw new RemotingException(channel,
                    "Unsupported request: "
                            + (msg.getClass().getName() + ": " + msg)
                            + ", channel: consumer: "
                            + channel.getRemoteAddress()
                            + " --> provider: "
                            + channel.getLocalAddress());
        }
        @Override
        public void received(Channel channel, Object message) throws RemotingException {
            if (message instanceof Invocation) {
                reply((ExchangeChannel) channel, message);
            } else {
                super.received(channel, message);
            }
        }
    };
    @Override
    public int getDefaultPort() {
        return DEFAULT_PORT;
    }
    /**
     * Exports the given invoker, starting (at most) one exchange server per
     * local address.
     */
    @Override
    public <T> Exporter<T> export(Invoker<T> invoker) throws RpcException {
        // can use thrift codec only
        URL url = invoker.getUrl().addParameter(Constants.CODEC_KEY, ThriftCodec.NAME);
        // find server.
        String key = url.getAddress();
        // client can expose a service for server to invoke only.
        boolean isServer = url.getParameter(Constants.IS_SERVER_KEY, true);
        if (isServer && !serverMap.containsKey(key)) {
            // BUGFIX: the former containsKey()/put() sequence was a check-then-act race;
            // two concurrent exports for the same address could both call getServer(url),
            // double-binding the port and leaking a server. Double-check under a lock.
            synchronized (serverMap) {
                if (!serverMap.containsKey(key)) {
                    serverMap.put(key, getServer(url));
                }
            }
        }
        // export service.
        key = serviceKey(url);
        DubboExporter<T> exporter = new DubboExporter<T>(invoker, key, exporterMap);
        exporterMap.put(key, exporter);
        return exporter;
    }
    /** Closes all bound servers with the configured shutdown timeout. */
    @Override
    public void destroy() {
        super.destroy();
        for (String key : new ArrayList<String>(serverMap.keySet())) {
            ExchangeServer server = serverMap.remove(key);
            if (server != null) {
                try {
                    if (logger.isInfoEnabled()) {
                        logger.info("Close dubbo server: " + server.getLocalAddress());
                    }
                    server.close(ConfigUtils.getServerShutdownTimeout());
                } catch (Throwable t) {
                    // Best effort: one failing server must not prevent closing the rest.
                    logger.warn(t.getMessage(), t);
                }
            } // ~ end of if ( server != null )
        } // ~ end of loop serverMap
    } // ~ end of method destroy
    @Override
    public <T> Invoker<T> refer(Class<T> type, URL url) throws RpcException {
        ThriftInvoker<T> invoker = new ThriftInvoker<T>(type, url, getClients(url), invokers);
        invokers.add(invoker);
        return invoker;
    }
    /** Creates the configured number of clients (default 1) for the given URL. */
    private ExchangeClient[] getClients(URL url) {
        int connections = url.getParameter(Constants.CONNECTIONS_KEY, 1);
        ExchangeClient[] clients = new ExchangeClient[connections];
        for (int i = 0; i < clients.length; i++) {
            clients[i] = initClient(url);
        }
        return clients;
    }
    /** Connects a single client, forcing the thrift codec on the URL. */
    private ExchangeClient initClient(URL url) {
        ExchangeClient client;
        url = url.addParameter(Constants.CODEC_KEY, ThriftCodec.NAME);
        try {
            client = Exchangers.connect(url);
        } catch (RemotingException e) {
            throw new RpcException("Fail to create remoting client for service(" + url
                    + "): " + e.getMessage(), e);
        }
        return client;
    }
    /** Binds a server for the URL after validating the configured transporters. */
    private ExchangeServer getServer(URL url) {
        // enable sending readonly event when server closes by default
        url = url.addParameterIfAbsent(Constants.CHANNEL_READONLYEVENT_SENT_KEY, Boolean.TRUE.toString());
        String str = url.getParameter(Constants.SERVER_KEY, Constants.DEFAULT_REMOTING_SERVER);
        if (str != null && str.length() > 0 && !ExtensionLoader.getExtensionLoader(Transporter.class).hasExtension(str))
            throw new RpcException("Unsupported server type: " + str + ", url: " + url);
        ExchangeServer server;
        try {
            server = Exchangers.bind(url, handler);
        } catch (RemotingException e) {
            throw new RpcException("Fail to start server(url: " + url + ") " + e.getMessage(), e);
        }
        str = url.getParameter(Constants.CLIENT_KEY);
        if (str != null && str.length() > 0) {
            Set<String> supportedTypes = ExtensionLoader.getExtensionLoader(Transporter.class).getSupportedExtensions();
            if (!supportedTypes.contains(str)) {
                throw new RpcException("Unsupported client type: " + str);
            }
        }
        return server;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.ignite.internal.util.typedef.C1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.jetbrains.annotations.Nullable;
/**
* Cache change requests to execute when receive {@link DynamicCacheChangeBatch} event.
*/
public class ExchangeActions {
    /** Cache groups queued to start in this exchange. */
    private List<CacheGroupActionData> cacheGrpsToStart;
    /** Cache groups queued to stop in this exchange. */
    private List<CacheGroupActionData> cacheGrpsToStop;
    /** Cache start actions, keyed by cache name. */
    private Map<String, CacheActionData> cachesToStart;
    /** Cache stop actions, keyed by cache name. */
    private Map<String, CacheActionData> cachesToStop;
    /** Reset-lost-partitions actions, keyed by cache name. */
    private Map<String, CacheActionData> cachesToResetLostParts;
    /** Context of caches started on local node join. */
    private LocalJoinCachesContext locJoinCtx;
    /** Pending cluster state change request, if any. */
    private StateChangeRequest stateChangeReq;
/**
* @param grpId Group ID.
* @return Always {@code true}, fails with assert error if inconsistent.
*/
boolean checkStopRequestConsistency(int grpId) {
Boolean destroy = null;
// Check that caches associated with that group will be all stopped only or all destroyed.
for (CacheActionData action : cacheStopRequests()) {
if (action.descriptor().groupId() == grpId) {
if (destroy == null)
destroy = action.request().destroy();
else {
assert action.request().destroy() == destroy
: "Both cache stop only and cache destroy request associated with one group in batch "
+ cacheStopRequests();
}
}
}
return true;
}
/**
* @return {@code True} if server nodes should not participate in exchange.
*/
public boolean clientOnlyExchange() {
return F.isEmpty(cachesToStart) &&
F.isEmpty(cachesToStop) &&
F.isEmpty(cacheGrpsToStart) &&
F.isEmpty(cacheGrpsToStop) &&
F.isEmpty(cachesToResetLostParts);
}
/**
* @return New caches start requests.
*/
public Collection<CacheActionData> cacheStartRequests() {
return cachesToStart != null ? cachesToStart.values() : Collections.<CacheActionData>emptyList();
}
/**
* @return Stop cache requests.
*/
Collection<CacheActionData> cacheStopRequests() {
return cachesToStop != null ? cachesToStop.values() : Collections.<CacheActionData>emptyList();
}
    /**
     * Completes the client-side futures of every start, stop and reset-lost-partitions
     * request collected in this batch.
     *
     * @param ctx Context.
     */
    public void completeRequestFutures(GridCacheSharedContext ctx) {
        completeRequestFutures(cachesToStart, ctx);
        completeRequestFutures(cachesToStop, ctx);
        completeRequestFutures(cachesToResetLostParts, ctx);
    }
/**
* @return {@code True} if starting system caches.
*/
public boolean systemCachesStarting() {
if (cachesToStart != null) {
for (CacheActionData data : cachesToStart.values()) {
if (CU.isSystemCache(data.request().cacheName()))
return true;
}
}
return false;
}
/**
* @param map Actions map.
* @param ctx Context.
*/
private void completeRequestFutures(Map<String, CacheActionData> map, GridCacheSharedContext ctx) {
if (map != null) {
for (CacheActionData req : map.values())
ctx.cache().completeCacheStartFuture(req.req, true, null);
}
}
    /**
     * @return {@code True} if this batch contains at least one cache stop request.
     */
    public boolean hasStop() {
        return !F.isEmpty(cachesToStop);
    }
/**
* @return Caches to reset lost partitions for.
*/
public Set<String> cachesToResetLostPartitions() {
Set<String> caches = null;
if (cachesToResetLostParts != null)
caches = new HashSet<>(cachesToResetLostParts.keySet());
return caches != null ? caches : Collections.<String>emptySet();
}
/**
* @param cacheId Cache ID.
* @return {@code True} if cache stop was requested.
*/
public boolean cacheStopped(int cacheId) {
if (cachesToStop != null) {
for (CacheActionData cache : cachesToStop.values()) {
if (cache.desc.cacheId() == cacheId)
return true;
}
}
return false;
}
/**
* @param cacheId Cache ID.
* @return {@code True} if cache start was requested.
*/
public boolean cacheStarted(int cacheId) {
if (cachesToStart != null) {
for (CacheActionData cache : cachesToStart.values()) {
if (cache.desc.cacheId() == cacheId)
return true;
}
}
return false;
}
    /**
     * Sets the cluster state change request carried by this batch.
     *
     * @param stateChange Cluster state change request.
     */
    public void stateChangeRequest(StateChangeRequest stateChange) {
        this.stateChangeReq = stateChange;
    }
    /**
     * @return {@code True} if this batch carries a deactivation request
     *      (active state changes and the target state is inactive).
     */
    public boolean deactivate() {
        return stateChangeReq != null && stateChangeReq.activeChanged() && !stateChangeReq.activate();
    }
    /**
     * @return {@code True} if this batch carries an activation request
     *      (active state changes and the target state is active).
     */
    public boolean activate() {
        return stateChangeReq != null && stateChangeReq.activeChanged() && stateChangeReq.activate();
    }
    /**
     * @return {@code True} if this batch carries a baseline topology change request
     *      (a state change request that does not toggle the active state).
     */
    public boolean changedBaseline() {
        return stateChangeReq != null && !stateChangeReq.activeChanged();
    }
    /**
     * @return Cluster state change request, or {@code null} if this batch has none.
     */
    @Nullable public StateChangeRequest stateChangeRequest() {
        return stateChangeReq;
    }
/**
* @param map Actions map.
* @param req Request.
* @param desc Cache descriptor.
* @return Actions map.
*/
private Map<String, CacheActionData> add(Map<String, CacheActionData> map,
DynamicCacheChangeRequest req,
DynamicCacheDescriptor desc) {
assert req != null;
assert desc != null;
if (map == null)
map = new LinkedHashMap<>();
CacheActionData old = map.put(req.cacheName(), new CacheActionData(req, desc));
assert old == null : old;
return map;
}
    /**
     * Queues a cache start action; the request must be a start request.
     *
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToStart(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.start() : req;
        cachesToStart = add(cachesToStart, req, desc);
    }
    /**
     * Queues a cache stop action; the request must be a stop request.
     *
     * @param req Request.
     * @param desc Cache descriptor.
     */
    public void addCacheToStop(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.stop() : req;
        cachesToStop = add(cachesToStop, req, desc);
    }
    /**
     * Queues a reset-lost-partitions action; the request must be a reset request.
     *
     * @param req Request.
     * @param desc Cache descriptor.
     */
    void addCacheToResetLostPartitions(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req.resetLostPartitions() : req;
        cachesToResetLostParts = add(cachesToResetLostParts, req, desc);
    }
    /**
     * Queues a cache group start action, lazily creating the backing list.
     *
     * @param grpDesc Group descriptor.
     */
    void addCacheGroupToStart(CacheGroupDescriptor grpDesc) {
        assert grpDesc != null;
        if (cacheGrpsToStart == null)
            cacheGrpsToStart = new ArrayList<>();
        cacheGrpsToStart.add(new CacheGroupActionData(grpDesc));
    }
/**
* @return Cache groups to start.
*/
public List<CacheGroupActionData> cacheGroupsToStart() {
return cacheGrpsToStart != null ? cacheGrpsToStart : Collections.<CacheGroupActionData>emptyList();
}
/**
* @param grpId Group ID.
* @return {@code True} if given cache group starting.
*/
public boolean cacheGroupStarting(int grpId) {
if (cacheGrpsToStart != null) {
for (CacheGroupActionData grp : cacheGrpsToStart) {
if (grp.desc.groupId() == grpId)
return true;
}
}
return false;
}
    /**
     * Queues a cache group stop action, lazily creating the backing list.
     *
     * @param grpDesc Group descriptor.
     * @param destroy Destroy flag ({@code true} to destroy the group's data, not just stop it).
     */
    public void addCacheGroupToStop(CacheGroupDescriptor grpDesc, boolean destroy) {
        assert grpDesc != null;
        if (cacheGrpsToStop == null)
            cacheGrpsToStop = new ArrayList<>();
        cacheGrpsToStop.add(new CacheGroupActionData(grpDesc, destroy));
    }
/**
* @return Cache groups to start.
*/
public List<CacheGroupActionData> cacheGroupsToStop() {
return cacheGrpsToStop != null ? cacheGrpsToStop : Collections.<CacheGroupActionData>emptyList();
}
/**
 * Tells whether the given cache group is scheduled to be stopped by these actions.
 *
 * @param grpId Group ID.
 * @return {@code True} if given cache group stopping.
 */
public boolean cacheGroupStopping(int grpId) {
    if (cacheGrpsToStop == null)
        return false;

    for (CacheGroupActionData action : cacheGrpsToStop) {
        if (action.desc.groupId() == grpId)
            return true;
    }

    return false;
}
/**
 * @return {@code True} if there are no cache change actions of any kind
 *      (no starts, stops, resets, state change request or local join context).
 */
public boolean empty() {
    return F.isEmpty(cachesToStart) &&
        F.isEmpty(cachesToStop) &&
        F.isEmpty(cacheGrpsToStart) &&
        F.isEmpty(cacheGrpsToStop) &&
        F.isEmpty(cachesToResetLostParts) &&
        stateChangeReq == null &&
        locJoinCtx == null;
}
/**
 * Sets the caches local join context.
 *
 * @param locJoinCtx Caches local join context.
 */
public void localJoinContext(LocalJoinCachesContext locJoinCtx) {
    this.locJoinCtx = locJoinCtx;
}

/**
 * @return Caches local join context ({@code null} if not set).
 */
public LocalJoinCachesContext localJoinContext() {
    return locJoinCtx;
}
/**
 * Pair of a cache change request and the descriptor of the affected cache.
 */
public static class CacheActionData {
    /** Cache change request. */
    private final DynamicCacheChangeRequest req;

    /** Cache descriptor. */
    private final DynamicCacheDescriptor desc;

    /**
     * @param req Request.
     * @param desc Cache descriptor.
     */
    CacheActionData(DynamicCacheChangeRequest req, DynamicCacheDescriptor desc) {
        assert req != null;
        assert desc != null;

        this.req = req;
        this.desc = desc;
    }

    /**
     * @return Request.
     */
    public DynamicCacheChangeRequest request() {
        return req;
    }

    /**
     * @return Cache descriptor.
     */
    public DynamicCacheDescriptor descriptor() {
        return desc;
    }
}
/**
 * Pair of a cache group descriptor and its destroy flag.
 */
static class CacheGroupActionData {
    /** Group descriptor. */
    private final CacheGroupDescriptor desc;

    /** Destroy flag. */
    private final boolean destroy;

    /**
     * @param desc Group descriptor
     * @param destroy Destroy flag
     */
    CacheGroupActionData(CacheGroupDescriptor desc, boolean destroy) {
        assert desc != null;

        this.desc = desc;
        this.destroy = destroy;
    }

    /**
     * Creates an action with the destroy flag cleared.
     *
     * @param desc Group descriptor
     */
    CacheGroupActionData(CacheGroupDescriptor desc) {
        this(desc, false);
    }

    /**
     * @return Group descriptor
     */
    public CacheGroupDescriptor descriptor() {
        return desc;
    }

    /**
     * @return Destroy flag
     */
    public boolean destroy() {
        return destroy;
    }
}
/** {@inheritDoc} */
@Override public String toString() {
    // Render group actions as their names (stops additionally show the destroy flag).
    Object startGrps = F.viewReadOnly(cacheGrpsToStart, new C1<CacheGroupActionData, String>() {
        @Override public String apply(CacheGroupActionData data) {
            return data.desc.cacheOrGroupName();
        }
    });

    Object stopGrps = F.viewReadOnly(cacheGrpsToStop, new C1<CacheGroupActionData, String>() {
        @Override public String apply(CacheGroupActionData data) {
            return data.desc.cacheOrGroupName() + ", destroy=" + data.destroy;
        }
    });

    return "ExchangeActions [startCaches=" + (cachesToStart != null ? cachesToStart.keySet() : null) +
        ", stopCaches=" + (cachesToStop != null ? cachesToStop.keySet() : null) +
        ", startGrps=" + startGrps +
        ", stopGrps=" + stopGrps +
        ", resetParts=" + (cachesToResetLostParts != null ? cachesToResetLostParts.keySet() : null) +
        ", stateChangeRequest=" + stateChangeReq + ']';
}
}
| |
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.test.autoconfigure.web.servlet;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.servlet.Filter;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.ListableBeanFactory;
import org.springframework.beans.factory.NoSuchBeanDefinitionException;
import org.springframework.boot.web.servlet.AbstractFilterRegistrationBean;
import org.springframework.boot.web.servlet.DelegatingFilterProxyRegistrationBean;
import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.boot.web.servlet.RegistrationBean;
import org.springframework.boot.web.servlet.ServletContextInitializerBeans;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.ResultHandler;
import org.springframework.test.web.servlet.result.PrintingResultHandler;
import org.springframework.test.web.servlet.setup.ConfigurableMockMvcBuilder;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.web.context.WebApplicationContext;
/**
* {@link MockMvcBuilderCustomizer} for a typical Spring Boot application. Usually applied
* automatically via {@link AutoConfigureMockMvc @AutoConfigureMockMvc}, but may also be
* used directly.
*
* @author Phillip Webb
* @author Andy Wilkinson
* @since 1.4.0
*/
public class SpringBootMockMvcBuilderCustomizer implements MockMvcBuilderCustomizer {

    private final WebApplicationContext context;

    // Whether servlet filters registered in the context are added to MockMvc.
    private boolean addFilters = true;

    // How MvcResult details are printed (see getLinesWriter()).
    private MockMvcPrint print = MockMvcPrint.DEFAULT;

    // When true, result output is buffered and only written on test failure.
    private boolean printOnlyOnFailure = true;

    /**
     * Create a new {@link SpringBootMockMvcBuilderCustomizer} instance.
     * @param context the source application context
     */
    public SpringBootMockMvcBuilderCustomizer(WebApplicationContext context) {
        Assert.notNull(context, "Context must not be null");
        this.context = context;
    }
/**
 * Applies this customizer: registers the context's filters (when enabled) and
 * installs a result-printing handler (when configured).
 */
@Override
public void customize(ConfigurableMockMvcBuilder<?> builder) {
    if (this.addFilters) {
        addFilters(builder);
    }
    ResultHandler handler = getPrintHandler();
    if (handler == null) {
        return;
    }
    builder.alwaysDo(handler);
}
/**
 * Builds the {@link ResultHandler} that prints MVC results, or returns
 * {@code null} when printing is disabled.
 */
private ResultHandler getPrintHandler() {
    LinesWriter linesWriter = getLinesWriter();
    if (linesWriter == null) {
        return null;
    }
    // Wrap in a deferring writer when output should only appear on failure.
    LinesWriter target = this.printOnlyOnFailure ? new DeferredLinesWriter(this.context, linesWriter)
            : linesWriter;
    return new LinesWritingResultHandler(target);
}
/**
 * Resolves the configured {@link MockMvcPrint} option to a {@link LinesWriter}
 * ({@code null} when printing is switched off).
 */
private LinesWriter getLinesWriter() {
    switch (this.print) {
    case NONE:
        return null;
    case LOG_DEBUG:
        return new LoggingLinesWriter();
    default:
        return new SystemLinesWriter(this.print);
    }
}
/**
 * Registers every enabled {@link Filter} from the context with the MockMvc
 * builder, honoring each registration's URL patterns.
 */
private void addFilters(ConfigurableMockMvcBuilder<?> builder) {
    FilterRegistrationBeans registrations = new FilterRegistrationBeans(this.context);
    registrations.stream().map(AbstractFilterRegistrationBean.class::cast)
            .filter(AbstractFilterRegistrationBean::isEnabled)
            .forEach((registration) -> addFilter(builder, registration));
}

private void addFilter(ConfigurableMockMvcBuilder<?> builder, AbstractFilterRegistrationBean<?> registration) {
    Filter filter = registration.getFilter();
    Collection<String> urls = registration.getUrlPatterns();
    if (urls.isEmpty()) {
        // No explicit patterns: apply the filter to all requests.
        builder.addFilters(filter);
    }
    else {
        builder.addFilter(filter, StringUtils.toStringArray(urls));
    }
}
/**
 * Set whether filters from the context should be registered with MockMvc.
 * @param addFilters {@code true} to register context filters
 */
public void setAddFilters(boolean addFilters) {
    this.addFilters = addFilters;
}

public boolean isAddFilters() {
    return this.addFilters;
}

/**
 * Set how {@link MvcResult} details should be printed.
 * @param print the print option
 */
public void setPrint(MockMvcPrint print) {
    this.print = print;
}

public MockMvcPrint getPrint() {
    return this.print;
}

/**
 * Set whether result printing should happen only when a test fails.
 * @param printOnlyOnFailure {@code true} to defer printing until failure
 */
public void setPrintOnlyOnFailure(boolean printOnlyOnFailure) {
    this.printOnlyOnFailure = printOnlyOnFailure;
}

public boolean isPrintOnlyOnFailure() {
    return this.printOnlyOnFailure;
}
/**
 * {@link ResultHandler} that prints {@link MvcResult} details to a given
 * {@link LinesWriter}.
 */
private static class LinesWritingResultHandler implements ResultHandler {

    private final LinesWriter writer;

    LinesWritingResultHandler(LinesWriter writer) {
        this.writer = writer;
    }

    @Override
    public void handle(MvcResult result) throws Exception {
        // Render the result into an in-memory line buffer, then hand all
        // lines to the configured writer in a single call.
        LinesPrintingResultHandler delegate = new LinesPrintingResultHandler();
        delegate.handle(result);
        delegate.write(this.writer);
    }

    /** {@link PrintingResultHandler} that captures output as lines instead of streaming it. */
    private static class LinesPrintingResultHandler extends PrintingResultHandler {

        protected LinesPrintingResultHandler() {
            super(new Printer());
        }

        void write(LinesWriter writer) {
            writer.write(((Printer) getPrinter()).getLines());
        }

        /** {@code ResultValuePrinter} that collects printed output into a list of lines. */
        private static class Printer implements ResultValuePrinter {

            private final List<String> lines = new ArrayList<>();

            @Override
            public void printHeading(String heading) {
                this.lines.add("");
                this.lines.add(String.format("%s:", heading));
            }

            @Override
            public void printValue(String label, Object value) {
                // Arrays render via their element values rather than Object#toString.
                if (value != null && value.getClass().isArray()) {
                    value = CollectionUtils.arrayToList(value);
                }
                this.lines.add(String.format("%17s = %s", label, value));
            }

            List<String> getLines() {
                return this.lines;
            }

        }

    }

}
/**
 * Strategy interface to write MVC result lines.
 */
interface LinesWriter {

    /**
     * Write the given lines to this writer's destination.
     * @param lines the lines to write
     */
    void write(List<String> lines);

}
/**
 * {@link LinesWriter} used to defer writing until errors are detected.
 * <p>
 * Registers itself as a singleton in the application context so it can later be
 * looked up via {@link #get(ApplicationContext)} to flush or discard the
 * buffered output.
 *
 * @see MockMvcPrintOnlyOnFailureTestExecutionListener
 */
static class DeferredLinesWriter implements LinesWriter {

    private static final String BEAN_NAME = DeferredLinesWriter.class.getName();

    private final LinesWriter delegate;

    // Buffered per thread so concurrently running tests do not interleave output.
    private final ThreadLocal<List<String>> lines = ThreadLocal.withInitial(ArrayList::new);

    DeferredLinesWriter(WebApplicationContext context, LinesWriter delegate) {
        Assert.state(context instanceof ConfigurableApplicationContext,
                "A ConfigurableApplicationContext is required for printOnlyOnFailure");
        ((ConfigurableApplicationContext) context).getBeanFactory().registerSingleton(BEAN_NAME, this);
        this.delegate = delegate;
    }

    @Override
    public void write(List<String> lines) {
        this.lines.get().addAll(lines);
    }

    /** Writes all lines buffered for the current thread to the delegate. */
    void writeDeferredResult() {
        this.delegate.write(this.lines.get());
    }

    static DeferredLinesWriter get(ApplicationContext applicationContext) {
        try {
            return applicationContext.getBean(BEAN_NAME, DeferredLinesWriter.class);
        }
        catch (NoSuchBeanDefinitionException ex) {
            // No deferred writer registered (printOnlyOnFailure not in use).
            return null;
        }
    }

    // NOTE(review): clears the list but keeps the ThreadLocal entry alive;
    // presumably fine for short-lived test threads — confirm for pooled threads.
    void clear() {
        this.lines.get().clear();
    }

}
/**
 * {@link LinesWriter} to output results to the log.
 */
private static class LoggingLinesWriter implements LinesWriter {

    private static final Log logger = LogFactory.getLog("org.springframework.test.web.servlet.result");

    @Override
    public void write(List<String> lines) {
        // Only assemble the message when debug logging is actually enabled.
        if (logger.isDebugEnabled()) {
            StringWriter stringWriter = new StringWriter();
            PrintWriter printWriter = new PrintWriter(stringWriter);
            for (String line : lines) {
                printWriter.println(line);
            }
            logger.debug("MvcResult details:\n" + stringWriter);
        }
    }

}
/**
 * {@link LinesWriter} to output results to {@code System.out} or {@code System.err}.
 */
private static class SystemLinesWriter implements LinesWriter {

    private final MockMvcPrint print;

    SystemLinesWriter(MockMvcPrint print) {
        this.print = print;
    }

    @Override
    public void write(List<String> lines) {
        PrintStream stream = getPrintStream();
        lines.forEach(stream::println);
    }

    private PrintStream getPrintStream() {
        return (this.print == MockMvcPrint.SYSTEM_ERR) ? System.err : System.out;
    }

}
/**
 * {@link ServletContextInitializerBeans} restricted to filter registrations,
 * additionally adapting plain {@link Filter} beans to registration beans.
 */
private static class FilterRegistrationBeans extends ServletContextInitializerBeans {

    FilterRegistrationBeans(ListableBeanFactory beanFactory) {
        super(beanFactory, FilterRegistrationBean.class, DelegatingFilterProxyRegistrationBean.class);
    }

    @Override
    protected void addAdaptableBeans(ListableBeanFactory beanFactory) {
        addAsRegistrationBean(beanFactory, Filter.class, new FilterRegistrationBeanAdapter());
    }

    /**
     * Adapter wrapping a bare {@link Filter} bean in a {@link FilterRegistrationBean}
     * named after the bean.
     */
    private static class FilterRegistrationBeanAdapter implements RegistrationBeanAdapter<Filter> {

        @Override
        public RegistrationBean createRegistrationBean(String name, Filter source, int totalNumberOfSourceBeans) {
            FilterRegistrationBean<Filter> bean = new FilterRegistrationBean<>(source);
            bean.setName(name);
            return bean;
        }

    }

}
}
| |
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.Experimental;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Bits;
import org.jgroups.util.MessageBatch;
import org.jgroups.util.Table;
import org.jgroups.util.Util;
import java.io.DataInput;
import java.io.DataOutput;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
/**
 * Implementation of total order protocol using a sequencer (unicast-unicast-multicast).
*
* Todo 1: on a sequencer change, the new coordinator needs to determine the highest seqno from all members
 * Todo 2: on a sequencer change, if a member has pending messages in the forward-queue, they need to be resent
* Todo 3: this protocol is currently broken, as a new member doesn't get the highest seqno and thus creates its table
* at offset=0, which means it will queue all messages higher than 0, and eventually run out of memory!!!
*
* @author Bela Ban
* @edited Andrei Palade
*/
@Experimental
@MBean(description="Implementation of total order protocol using a sequencer (unicast-unicast-multicast)")
public class SEQUENCER2 extends Protocol {
    protected Address local_addr;

    // Current sequencer; handleViewChange() sets this to the first member of the view.
    protected volatile Address coord;

    protected volatile View view;

    protected volatile boolean is_coord=false;

    protected final AtomicLong seqno=new AtomicLong(0); // only used by the sequencer

    // messages to be multicast are added to this queue; when seqnos are received from the sequencer, we remove and
    // send messages from the queue
    protected final BlockingQueue<Message> fwd_queue=new LinkedBlockingQueue<>(20000); // make this configurable

    // the number of seqno requests sent to the sequencer
    protected final AtomicInteger seqno_reqs=new AtomicInteger(0);

    protected volatile boolean running=true;

    // Accumulates removed messages into a batch (used by removeAndDeliver()).
    protected static final BiConsumer<MessageBatch,Message> BATCH_ACCUMULATOR=MessageBatch::add;

    // JMX counters.
    @ManagedAttribute protected long request_msgs;
    @ManagedAttribute protected long response_msgs;
    @ManagedAttribute protected long bcasts_sent;
    @ManagedAttribute protected long bcasts_received;
    @ManagedAttribute protected long bcasts_delivered;
    @ManagedAttribute protected long sent_requests;
    @ManagedAttribute protected long received_requests;
    @ManagedAttribute protected long sent_responses;
    @ManagedAttribute protected long received_responses;

    // BCAST messages keyed by seqno; drained in order by removeAndDeliver().
    protected Table<Message> received_msgs = new Table<>();

    @ManagedAttribute
    public boolean isCoordinator() {return is_coord;}
    public Address getCoordinator() {return coord;}
    public Address getLocalAddress() {return local_addr;}

    @ManagedAttribute(description="Number of messages in the forward-queue")
    public int getFwdQueueSize() {return fwd_queue.size();}

    /** Resets all JMX counters to zero. */
    @ManagedOperation
    public void resetStats() {
        request_msgs=response_msgs=bcasts_sent=bcasts_received=bcasts_delivered=0L;
        sent_requests=received_requests=sent_responses=received_responses=0L; // reset number of sent and received requests and responses
    }
/** Lifecycle: marks the protocol as running. */
public void start() throws Exception {
    super.start();
    running = true;
}

/**
 * Lifecycle: marks the protocol as stopped. Senders interrupted while blocked on
 * the forward-queue then return silently instead of throwing (see down(Message)).
 */
public void stop() {
    running=false;
    super.stop();
}
/**
 * Handles control events travelling down the stack: tracks view / coordinator
 * changes and the local address, then passes the event on.
 *
 * @param evt Event to process.
 * @return Result of passing the event down the stack.
 */
public Object down(Event evt) {
    int type=evt.getType();
    if(type == Event.VIEW_CHANGE)
        handleViewChange(evt.getArg());
    else if(type == Event.TMP_VIEW)
        handleTmpView(evt.getArg());
    else if(type == Event.SET_LOCAL_ADDRESS)
        local_addr=evt.getArg();
    return down_prot.down(evt);
}
/**
 * Intercepts application multicasts: the message is queued and a seqno is
 * requested from the coordinator instead of being sent directly.
 *
 * @param msg Message to send.
 * @return {@code null} for queued multicasts (they are sent when the RESPONSE
 *      arrives), otherwise the result of passing the message down.
 */
public Object down(Message msg) {
    // Unicasts and NO_TOTAL_ORDER/OOB messages bypass total ordering.
    if(msg.getDest() != null || msg.isFlagSet(Message.Flag.NO_TOTAL_ORDER) || msg.isFlagSet(Message.Flag.OOB))
        return down_prot.down(msg);

    if(msg.getSrc() == null)
        msg.setSrc(local_addr);

    try {
        fwd_queue.put(msg); // blocks when the (bounded) queue is full

        // Only the thread that raises the counter from 0 sends a request;
        // concurrent senders piggyback on the outstanding request and are
        // served when the RESPONSE arrives (see up(Message), RESPONSE case).
        if(seqno_reqs.getAndIncrement() == 0) {
            int num_reqs=seqno_reqs.get();
            sendSeqnoRequest(num_reqs);
        }
    }
    catch(InterruptedException e) {
        // Interrupted during shutdown: drop silently; otherwise propagate.
        if(!running)
            return null;
        throw new RuntimeException(e);
    }
    return null; // don't pass down
}
/**
 * Handles control events travelling up the stack. For VIEW_CHANGE the event is
 * passed up FIRST and the view is installed afterwards.
 *
 * @param evt Event to process.
 * @return Result of passing the event up the stack.
 */
public Object up(Event evt) {
    switch(evt.getType()) {
        case Event.VIEW_CHANGE:
            // Pass up first, then install the view locally.
            Object retval=up_prot.up(evt);
            handleViewChange(evt.getArg());
            return retval;

        case Event.TMP_VIEW:
            handleTmpView(evt.getArg());
            break;
    }
    return up_prot.up(evt);
}
/**
 * Handles protocol messages: seqno REQUESTs (coordinator only), seqno RESPONSEs
 * (senders waiting to broadcast queued messages) and BCASTs (ordered delivery).
 *
 * @param msg Received message.
 * @return {@code null} for messages carrying our header (consumed here);
 *      otherwise the result of passing the message up the stack.
 */
public Object up(Message msg) {
    SequencerHeader hdr;

    // NO_TOTAL_ORDER and OOB messages bypass total ordering.
    if(msg.isFlagSet(Message.Flag.NO_TOTAL_ORDER) || msg.isFlagSet(Message.Flag.OOB))
        return up_prot.up(msg);

    hdr=msg.getHeader(this.id);
    if(hdr == null)
        return up_prot.up(msg); // pass up

    switch(hdr.type) {
        case SequencerHeader.REQUEST:
            if(!is_coord) {
                log.error("%s: non-coord; dropping REQUEST request from %s", local_addr, msg.getSrc());
                return null;
            }
            Address sender=msg.getSrc();
            if(view != null && !view.containsMember(sender)) {
                // Fix: 'view' was previously also concatenated into the format
                // string, producing a garbled log line.
                log.error("%s: dropping REQUEST from non-member %s; view=%s", local_addr, sender, view);
                return null;
            }

            // Reserve a contiguous range of num_seqnos seqnos; new_seqno is its first value.
            long new_seqno=seqno.getAndAdd(hdr.num_seqnos) +1;
            sendSeqnoResponse(sender, new_seqno, hdr.num_seqnos);
            received_requests++;
            break;

        case SequencerHeader.RESPONSE:
            Address coordinator=msg.getSrc();
            if(view != null && !view.containsMember(coordinator)) {
                // Fix: 'local_addr' was previously concatenated to the format
                // string AND passed as an argument, duplicating it in the output.
                log.error("%s: dropping RESPONSE from non-coordinator %s; view=%s", local_addr, coordinator, view);
                return null;
            }

            // Broadcast one queued message per granted seqno.
            long send_seqno=hdr.seqno;
            for(int i=0; i < hdr.num_seqnos; i++) {
                Message bcast_msg=fwd_queue.poll();
                if(bcast_msg == null) {
                    log.error(Util.getMessage("Received%DSeqnosButFwdqueueIsEmpty"), hdr.num_seqnos);
                    break;
                }

                if(log.isTraceEnabled())
                    log.trace("%s: broadcasting %d", local_addr, send_seqno);
                broadcast(bcast_msg, send_seqno++);
            }

            // Requests that accumulated while this response was in flight are
            // re-sent (the redundant duplicate 'num_reqs > 0' check was removed).
            int num_reqs=seqno_reqs.addAndGet(-hdr.num_seqnos);
            if(num_reqs > 0)
                sendSeqnoRequest(num_reqs);
            break;

        case SequencerHeader.BCAST:
            deliver(msg, hdr);
            bcasts_received++;
            break;
    }
    return null;
}
/* public void up(MessageBatch batch) { // todo: better impl: add seq messages into the table in 1 op
List<Tuple<Long,Message>> msgs=null;
for(Iterator<Message> it=batch.iterator(); it.hasNext();) {
final Message msg=it.next();
if(msg == null || msg.isFlagSet(Message.Flag.NO_TOTAL_ORDER) || msg.isFlagSet(Message.Flag.OOB))
continue;
SequencerHeader hdr=(SequencerHeader)msg.getHeader(id);
if(hdr == null)
continue;
it.remove(); // we have a header; remove the message from the batch, so it won't be passed up the stack
switch(hdr.type) {
case SequencerHeader.REQUEST:
case SequencerHeader.RESPONSE:
up(msg);
break;
case SequencerHeader.BCAST:
if(msgs == null)
msgs=new ArrayList<Tuple<Long,Message>>(batch.size());
msgs.add(new Tuple<Long,Message>(hdr.seqno, msg));
break;
default:
log.error(Util.getMessage("HeaderTypeNotKnown"), local_addr, hdr.type);
}
}
if(msgs != null) {
Address sender=batch.sender();
if(sender == null) {
log.error(local_addr + ": sender is null, cannot deliver batch " + "::" + batch);
return;
}
final Table<Message> win=received_msgs;
System.out.println("<--B " + batch.sender() + "::" + batch);
win.add(msgs);
final AtomicBoolean processing=win.getProcessing();
if(processing.compareAndSet(false, true))
removeAndDeliver(processing, win, sender);
}
if(!batch.isEmpty())
up_prot.up(batch);
}*/
/**
 * Batch handling: pulls this protocol's messages out of the batch and processes
 * them one by one via {@code up(Message)}; the remainder is passed up unchanged.
 *
 * @param batch Received message batch.
 */
public void up(MessageBatch batch) {
    for(Message msg: batch) {
        if(msg.isFlagSet(Message.Flag.NO_TOTAL_ORDER) || msg.isFlagSet(Message.Flag.OOB) || msg.getHeader(id) == null)
            continue;
        // Remove before processing so the message is not also passed up with the batch.
        batch.remove(msg);

        // simplistic implementation
        try {
            up(msg);
        }
        catch(Throwable t) {
            log.error(Util.getMessage("FailedPassingUpMessage"), t);
        }
    }

    if(!batch.isEmpty())
        up_prot.up(batch);
}
/* --------------------------------- Private Methods ----------------------------------- */
/**
 * Installs a new view: keeps only newer views, records the coordinator (first
 * member of the view) and updates our own coordinator status.
 *
 * @param v New view.
 */
protected void handleViewChange(View v) {
    List<Address> mbrs=v.getMembers();
    if(mbrs.isEmpty()) return;

    // Ignore views older than (or equal to) the one already installed.
    if(view == null || view.compareTo(v) < 0)
        view=v;
    else
        return;

    Address existing_coord=coord, new_coord=mbrs.get(0);
    boolean coord_changed=!Objects.equals(existing_coord, new_coord);
    if(coord_changed && new_coord != null) {
        coord=new_coord;
        // todo: if I'm the new coord, get the highest seqno from all members. If not, re-send my pending seqno reqs
    }
    if(new_coord != null)
        is_coord=new_coord.equals(local_addr);
}

// If we're becoming coordinator, we need to handle TMP_VIEW as
// an immediate change of view. See JGRP-1452.
private void handleTmpView(View v) {
    List<Address> mbrs=v.getMembers();
    if(mbrs.isEmpty()) return;

    Address new_coord=mbrs.get(0);
    // Only act when the coordinator changes AND we are the new coordinator.
    if(!new_coord.equals(coord) && local_addr != null && local_addr.equals(new_coord))
        handleViewChange(v);
}
/**
 * Sends a REQUEST for {@code num_seqnos} seqnos to the current coordinator
 * (no-op when no coordinator is known yet).
 *
 * @param num_seqnos Number of seqnos to request.
 */
protected void sendSeqnoRequest(int num_seqnos) {
    Address target=coord;
    if(target == null)
        return;
    SequencerHeader hdr=new SequencerHeader(SequencerHeader.REQUEST, 0, num_seqnos);
    Message forward_msg=new Message(target).putHeader(this.id, hdr);
    down_prot.down(forward_msg);
    sent_requests++;
}

/**
 * Coordinator side: unicasts a RESPONSE granting {@code num_seqnos} seqnos
 * starting at {@code seqno} back to the requester.
 *
 * @param original_sender Requesting member.
 * @param seqno First granted seqno.
 * @param num_seqnos Number of granted seqnos.
 */
protected void sendSeqnoResponse(Address original_sender,long seqno, int num_seqnos) {
    SequencerHeader hdr = new SequencerHeader(SequencerHeader.RESPONSE, seqno, num_seqnos);
    Message ucast_msg = new Message(original_sender).putHeader(this.id, hdr);

    if (log.isTraceEnabled())
        log.trace(local_addr + ": sending seqno response to " + original_sender + ":: new_seqno=" + seqno + ", num_seqnos=" + num_seqnos);

    down_prot.down(ucast_msg);
    sent_responses++;
}

/**
 * Multicasts the message with a BCAST header carrying its assigned seqno.
 *
 * @param msg Message to broadcast.
 * @param seqno Assigned seqno.
 */
protected void broadcast(final Message msg, long seqno) {
    msg.putHeader(this.id, new SequencerHeader(SequencerHeader.BCAST, seqno));

    if(log.isTraceEnabled())
        log.trace(local_addr + ": broadcasting ::" + seqno);

    down_prot.down(msg);
    bcasts_sent++;
}
/**
 * Adds a BCAST message to the seqno-ordered table and triggers delivery of any
 * now-contiguous messages.
 *
 * @param msg Broadcast message.
 * @param hdr Its sequencer header (carries the seqno).
 */
protected void deliver(Message msg, SequencerHeader hdr) {
    Address sender=msg.getSrc();
    if(sender == null) {
        if(log.isErrorEnabled())
            log.error(local_addr + ": sender is null, cannot deliver " + "::" + hdr.getSeqno());
        return;
    }
    final Table<Message> win=received_msgs;
    win.add(hdr.seqno, msg);
    removeAndDeliver(win, sender);
}

/**
 * Drains removable messages from the table and passes them up as batches. The
 * adders counter ensures only the first arriving thread performs delivery while
 * later threads merely signal that more work arrived (it keeps looping until
 * the counter drops back to zero).
 *
 * @param win Table to drain.
 * @param sender Original sender, set on the delivered batches.
 */
protected void removeAndDeliver(Table<Message> win, Address sender) {
    AtomicInteger adders=win.getAdders();
    // Another thread is already delivering: record our arrival and leave.
    if(adders.getAndIncrement() != 0)
        return;
    final MessageBatch batch=new MessageBatch(win.size()).dest(local_addr).sender(sender).multicast(false);
    Supplier<MessageBatch> batch_creator=() -> batch;
    do {
        try {
            batch.reset(); // reuse the same batch instance on every iteration
            win.removeMany(true, 0, null, batch_creator, BATCH_ACCUMULATOR);
        }
        catch(Throwable t) {
            log.error("failed removing messages from table for " + sender, t);
        }
        if(!batch.isEmpty()) {
            // batch is guaranteed to NOT contain any OOB messages as the drop_oob_msgs_filter removed them
            deliverBatch(batch);
        }
    }
    while(adders.decrementAndGet() != 0);
}
/**
 * Passes an ordered batch up the stack, tracing the delivered seqno range.
 *
 * @param batch Batch to deliver (empty batches are ignored).
 */
protected void deliverBatch(MessageBatch batch) {
    try {
        if(batch.isEmpty())
            return;
        if(log.isTraceEnabled()) {
            Message first=batch.first(), last=batch.last();
            StringBuilder sb=new StringBuilder(local_addr + ": delivering");
            if(first != null && last != null) {
                SequencerHeader hdr1=first.getHeader(id), hdr2=last.getHeader(id);
                sb.append(" #").append(hdr1.seqno).append(" - #").append(hdr2.seqno);
            }
            sb.append(" (" + batch.size()).append(" messages)");
            log.trace(sb);
        }
        up_prot.up(batch);
    }
    catch(Throwable t) {
        log.error(Util.getMessage("FailedToDeliverMsg"), local_addr, "batch", batch, t);
    }
}
/* ----------------------------- End of Private Methods -------------------------------- */
/**
 * Header carried on REQUEST, RESPONSE and BCAST messages of this protocol.
 */
public static class SequencerHeader extends Header {
    protected static final byte REQUEST  = 1;
    protected static final byte BCAST    = 2;
    protected static final byte RESPONSE = 3;

    protected byte type;
    protected long seqno;
    protected int  num_seqnos=1; // the number of seqnos requested (REQUEST) or returned (on a RESPONSE)

    public SequencerHeader() {}

    public SequencerHeader(byte type) {this.type=type;}

    public SequencerHeader(byte type, long seqno) {
        this(type, seqno, 1);
    }

    public SequencerHeader(byte type, long seqno, int num_seqnos) {
        this(type);
        this.seqno=seqno;
        this.num_seqnos=num_seqnos;
    }

    public short getMagicId() {return 86;}

    public Supplier<? extends Header> create() {
        return SequencerHeader::new;
    }

    public long getSeqno() {return seqno;}

    public String toString() {
        StringBuilder sb=new StringBuilder(64);
        sb.append(printType());
        if(seqno >= 0)
            sb.append(" seqno=" + seqno);
        if(num_seqnos > 1)
            sb.append(", num_seqnos=" + num_seqnos);
        return sb.toString();
    }

    protected final String printType() {
        switch(type) {
            case REQUEST:  return "REQUEST";
            case BCAST:    return "BCAST";
            case RESPONSE: return "RESPONSE";
            default:       return "n/a";
        }
    }

    // Wire format: type (byte) + seqno (variable-length via Bits) + num_seqnos (unsigned short).
    public void writeTo(DataOutput out) throws Exception {
        out.writeByte(type);
        Bits.writeLong(seqno,out);
        out.writeShort(num_seqnos);
    }

    public void readFrom(DataInput in) throws Exception {
        type=in.readByte();
        seqno=Bits.readLong(in);
        num_seqnos=in.readUnsignedShort();
    }

    // type + seqno + num_seqnos
    public int serializedSize() {
        return Global.BYTE_SIZE + Bits.size(seqno) + Global.SHORT_SIZE;
    }
}
}
| |
/* Copyright 2004, 2005, 2006 Acegi Technology Pty Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.authentication.dao;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.List;
import junit.framework.TestCase;
import org.springframework.dao.DataRetrievalFailureException;
import org.springframework.security.authentication.AccountExpiredException;
import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.CredentialsExpiredException;
import org.springframework.security.authentication.DisabledException;
import org.springframework.security.authentication.InternalAuthenticationServiceException;
import org.springframework.security.authentication.LockedException;
import org.springframework.security.authentication.TestingAuthenticationToken;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.authentication.encoding.ShaPasswordEncoder;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.AuthorityUtils;
import org.springframework.security.core.userdetails.User;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.security.core.userdetails.cache.EhCacheBasedUserCache;
import org.springframework.security.core.userdetails.cache.NullUserCache;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
/**
* Tests {@link DaoAuthenticationProvider}.
*
* @author Ben Alex
* @author Rob Winch
*/
public class DaoAuthenticationProviderTests extends TestCase {
    // Reusable authority list for test users; presumably referenced by mock
    // UserDetails defined later in this class (not visible in this excerpt).
    private static final List<GrantedAuthority> ROLES_12 = AuthorityUtils
            .createAuthorityList("ROLE_ONE", "ROLE_TWO");

    // ~ Methods
    // ========================================================================================================

    // Password comparison must be case-sensitive: "KOala" must not match "koala".
    public void testAuthenticateFailsForIncorrectPasswordCase() {
        UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
                "rod", "KOala");
        DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
        provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
        provider.setUserCache(new MockUserCache());

        try {
            provider.authenticate(token);
            fail("Should have thrown BadCredentialsException");
        }
        catch (BadCredentialsException expected) {
            assertTrue(true);
        }
    }
// A null password must be rejected as bad credentials rather than causing an error.
public void testReceivedBadCredentialsWhenCredentialsNotProvided() {
    // Test related to SEC-434
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
    provider.setUserCache(new MockUserCache());

    UsernamePasswordAuthenticationToken authenticationToken = new UsernamePasswordAuthenticationToken(
            "rod", null);
    try {
        provider.authenticate(authenticationToken);
        // Fixed typo ("Expected BadCredenialsException") and aligned the wording
        // with the sibling tests in this class.
        fail("Should have thrown BadCredentialsException");
    }
    catch (BadCredentialsException expected) {
        assertTrue(true);
    }
}
// An expired account must be rejected even with correct credentials.
public void testAuthenticateFailsIfAccountExpired() {
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "peter", "opal");
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserPeterAccountExpired());
    provider.setUserCache(new MockUserCache());

    try {
        provider.authenticate(token);
        fail("Should have thrown AccountExpiredException");
    }
    catch (AccountExpiredException expected) {
        assertTrue(true);
    }
}

// A locked account must be rejected even with correct credentials.
public void testAuthenticateFailsIfAccountLocked() {
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "peter", "opal");
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserPeterAccountLocked());
    provider.setUserCache(new MockUserCache());

    try {
        provider.authenticate(token);
        fail("Should have thrown LockedException");
    }
    catch (LockedException expected) {
        assertTrue(true);
    }
}

// Expired credentials must fail; a wrong password must still surface as
// BadCredentialsException rather than CredentialsExpiredException.
public void testAuthenticateFailsIfCredentialsExpired() {
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "peter", "opal");
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserPeterCredentialsExpired());
    provider.setUserCache(new MockUserCache());

    try {
        provider.authenticate(token);
        fail("Should have thrown CredentialsExpiredException");
    }
    catch (CredentialsExpiredException expected) {
        assertTrue(true);
    }

    // Check that wrong password causes BadCredentialsException, rather than
    // CredentialsExpiredException
    token = new UsernamePasswordAuthenticationToken("peter", "wrong_password");

    try {
        provider.authenticate(token);
        fail("Should have thrown BadCredentialsException");
    }
    catch (BadCredentialsException expected) {
        assertTrue(true);
    }
}

// A disabled user must be rejected even with correct credentials.
public void testAuthenticateFailsIfUserDisabled() {
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "peter", "opal");
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserPeter());
    provider.setUserCache(new MockUserCache());

    try {
        provider.authenticate(token);
        fail("Should have thrown DisabledException");
    }
    catch (DisabledException expected) {
        assertTrue(true);
    }
}
public void testAuthenticateFailsWhenAuthenticationDaoHasBackendFailure() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoSimulateBackendError());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown InternalAuthenticationServiceException");
}
catch (InternalAuthenticationServiceException expected) {
}
}
/**
 * Authentication with a null username must fail with BadCredentialsException.
 */
public void testAuthenticateFailsWithEmptyUsername() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
null, "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown BadCredentialsException");
}
catch (BadCredentialsException expected) {
assertTrue(true);
}
}
/**
 * A wrong password for the known user "rod" must produce
 * BadCredentialsException.
 */
public void testAuthenticateFailsWithInvalidPassword() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "INVALID_PASSWORD");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown BadCredentialsException");
}
catch (BadCredentialsException expected) {
assertTrue(true);
}
}
/**
 * With hideUserNotFoundExceptions disabled, an unknown username must surface
 * as the raw UsernameNotFoundException instead of being masked as a
 * bad-credentials failure.
 */
public void testAuthenticateFailsWithInvalidUsernameAndHideUserNotFoundExceptionFalse() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"INVALID_USER", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false); // we want UsernameNotFoundExceptions
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown UsernameNotFoundException");
}
catch (UsernameNotFoundException expected) {
assertTrue(true);
}
}
/**
 * By default hideUserNotFoundExceptions is true, so an unknown username must
 * be masked as BadCredentialsException (prevents username enumeration).
 */
public void testAuthenticateFailsWithInvalidUsernameAndHideUserNotFoundExceptionsWithDefaultOfTrue() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"INVALID_USER", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
// Confirm the default before exercising it.
assertTrue(provider.isHideUserNotFoundExceptions());
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown BadCredentialsException");
}
catch (BadCredentialsException expected) {
assertTrue(true);
}
}
/**
 * The username lookup is case-sensitive: "RoD" does not match the stored
 * "rod" (MockAuthenticationDaoUserrod compares with String.equals), and with
 * the default hiding behaviour the miss appears as BadCredentialsException.
 */
public void testAuthenticateFailsWithMixedCaseUsernameIfDefaultChanged() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"RoD", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
try {
provider.authenticate(token);
fail("Should have thrown BadCredentialsException");
}
catch (BadCredentialsException expected) {
assertTrue(true);
}
}
/**
 * Happy path: valid username/password yields a
 * UsernamePasswordAuthenticationToken carrying a User principal, the original
 * credentials, both granted roles, and the caller-supplied details.
 */
public void testAuthenticates() {
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
    provider.setUserCache(new MockUserCache());
    UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
            "rod", "koala");
    token.setDetails("192.168.0.1");
    Authentication result = provider.authenticate(token);
    if (!(result instanceof UsernamePasswordAuthenticationToken)) {
        fail("Should have returned instance of UsernamePasswordAuthenticationToken");
    }
    UsernamePasswordAuthenticationToken castResult = (UsernamePasswordAuthenticationToken) result;
    assertEquals(User.class, castResult.getPrincipal().getClass());
    assertEquals("koala", castResult.getCredentials());
    java.util.Set<String> roles = AuthorityUtils.authorityListToSet(castResult.getAuthorities());
    assertTrue(roles.contains("ROLE_ONE"));
    assertTrue(roles.contains("ROLE_TWO"));
    assertEquals("192.168.0.1", castResult.getDetails());
}
/**
 * Re-authenticating the Authentication returned by a successful call (whose
 * principal is now a UserDetails rather than a String) must also succeed and
 * preserve the credentials.
 */
public void testAuthenticatesASecondTime() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
Authentication result = provider.authenticate(token);
if (!(result instanceof UsernamePasswordAuthenticationToken)) {
fail("Should have returned instance of UsernamePasswordAuthenticationToken");
}
// Now try to authenticate with the previous result (with its UserDetails)
Authentication result2 = provider.authenticate(result);
if (!(result2 instanceof UsernamePasswordAuthenticationToken)) {
fail("Should have returned instance of UsernamePasswordAuthenticationToken");
}
assertEquals(result.getCredentials(), result2.getCredentials());
}
/**
 * With a SystemWideSaltSource configured, the stored password
 * "koala{SYSTEM_SALT_VALUE}" must match the submitted "koala", and the token
 * returned to the caller must still carry the original (unsalted) credentials.
 */
public void testAuthenticatesWhenASaltIsUsed() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
SystemWideSaltSource salt = new SystemWideSaltSource();
salt.setSystemWideSalt("SYSTEM_SALT_VALUE");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrodWithSalt());
provider.setSaltSource(salt);
provider.setUserCache(new MockUserCache());
Authentication result = provider.authenticate(token);
if (!(result instanceof UsernamePasswordAuthenticationToken)) {
fail("Should have returned instance of UsernamePasswordAuthenticationToken");
}
assertEquals(User.class, result.getPrincipal().getClass());
// We expect original credentials user submitted to be returned
assertEquals("koala", result.getCredentials());
assertTrue(AuthorityUtils.authorityListToSet(result.getAuthorities()).contains(
"ROLE_ONE"));
assertTrue(AuthorityUtils.authorityListToSet(result.getAuthorities()).contains(
"ROLE_TWO"));
}
/**
 * With forcePrincipalAsString enabled, the resulting token's principal must
 * be the plain username String rather than a UserDetails object.
 */
public void testAuthenticatesWithForcePrincipalAsString() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
provider.setUserCache(new MockUserCache());
provider.setForcePrincipalAsString(true);
Authentication result = provider.authenticate(token);
if (!(result instanceof UsernamePasswordAuthenticationToken)) {
fail("Should have returned instance of UsernamePasswordAuthenticationToken");
}
UsernamePasswordAuthenticationToken castResult = (UsernamePasswordAuthenticationToken) result;
assertEquals(String.class, castResult.getPrincipal().getClass());
assertEquals("rod", castResult.getPrincipal());
}
/**
 * A UserDetailsService that returns null violates its interface contract;
 * the provider must detect this and throw AuthenticationServiceException with
 * an explanatory message.
 */
public void testDetectsNullBeingReturnedFromAuthenticationDao() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoReturnsNull());
try {
provider.authenticate(token);
fail("Should have thrown AuthenticationServiceException");
}
catch (AuthenticationServiceException expected) {
assertEquals(
"UserDetailsService returned null, which is an interface contract violation",
expected.getMessage());
}
}
/**
 * Bean accessors round-trip: passwordEncoder, saltSource and userCache return
 * what was set, and forcePrincipalAsString is false until explicitly enabled.
 */
public void testGettersSetters() {
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setPasswordEncoder(new ShaPasswordEncoder());
assertEquals(ShaPasswordEncoder.class, provider.getPasswordEncoder().getClass());
provider.setSaltSource(new SystemWideSaltSource());
assertEquals(SystemWideSaltSource.class, provider.getSaltSource().getClass());
provider.setUserCache(new EhCacheBasedUserCache());
assertEquals(EhCacheBasedUserCache.class, provider.getUserCache().getClass());
// forcePrincipalAsString defaults to off.
assertFalse(provider.isForcePrincipalAsString());
provider.setForcePrincipalAsString(true);
assertTrue(provider.isForcePrincipalAsString());
}
/**
 * When the cached password no longer matches (the backing store changed it),
 * the provider must re-query the UserDetailsService for the latest record,
 * accept the new password, and refresh the cache entry.
 */
public void testGoesBackToAuthenticationDaoToObtainLatestPasswordIfCachedPasswordSeemsIncorrect() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"rod", "koala");
MockAuthenticationDaoUserrod authenticationDao = new MockAuthenticationDaoUserrod();
MockUserCache cache = new MockUserCache();
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(authenticationDao);
provider.setUserCache(cache);
// This will work, as password still "koala"
provider.authenticate(token);
// Check "rod = koala" ended up in the cache
assertEquals("koala", cache.getUserFromCache("rod").getPassword());
// Now change the password the AuthenticationDao will return
authenticationDao.setPassword("easternLongNeckTurtle");
// Now try authentication again, with the new password
token = new UsernamePasswordAuthenticationToken("rod", "easternLongNeckTurtle");
provider.authenticate(token);
// To get this far, the new password was accepted
// Check the cache was updated
assertEquals("easternLongNeckTurtle", cache.getUserFromCache("rod").getPassword());
}
/**
 * afterPropertiesSet must reject a provider configured without a
 * UserDetailsService.
 */
public void testStartupFailsIfNoAuthenticationDao() throws Exception {
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
try {
provider.afterPropertiesSet();
fail("Should have thrown IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
assertTrue(true);
}
}
/**
 * The default user cache is NullUserCache; explicitly setting the cache to
 * null must make afterPropertiesSet fail.
 */
public void testStartupFailsIfNoUserCacheSet() throws Exception {
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
assertEquals(NullUserCache.class, provider.getUserCache().getClass());
provider.setUserCache(null);
try {
provider.afterPropertiesSet();
fail("Should have thrown IllegalArgumentException");
}
catch (IllegalArgumentException expected) {
assertTrue(true);
}
}
/**
 * A provider with both a UserDetailsService and a user cache configured must
 * pass afterPropertiesSet and expose the service it was given.
 */
public void testStartupSuccess() throws Exception {
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
UserDetailsService userDetailsService = new MockAuthenticationDaoUserrod();
provider.setUserDetailsService(userDetailsService);
provider.setUserCache(new MockUserCache());
assertEquals(userDetailsService, provider.getUserDetailsService());
provider.afterPropertiesSet();
assertTrue(true);
}
/**
 * The provider supports username/password tokens and nothing else.
 */
public void testSupports() {
    DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
    assertTrue(provider.supports(UsernamePasswordAuthenticationToken.class));
    assertFalse(provider.supports(TestingAuthenticationToken.class));
}
// SEC-2056: the password encoder must still be invoked when the user is not
// found, so lookup misses are not distinguishable from password mismatches by
// timing.
public void testUserNotFoundEncodesPassword() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"missing", "koala");
PasswordEncoder encoder = mock(PasswordEncoder.class);
when(encoder.encode(anyString())).thenReturn("koala");
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false);
provider.setPasswordEncoder(encoder);
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
try {
provider.authenticate(token);
fail("Expected Exception");
}
catch (UsernameNotFoundException success) {
// Expected: the user does not exist and hiding is disabled.
}
// ensure encoder invoked w/ non-null strings since PasswordEncoder impls may fail
// if encoded password is null
verify(encoder).matches(isA(String.class), isA(String.class));
}
/**
 * SEC-2056 variant with a real BCryptPasswordEncoder: a lookup miss must
 * result only in UsernameNotFoundException — the dummy encoding performed for
 * timing parity must not blow up the BCrypt implementation.
 */
public void testUserNotFoundBCryptPasswordEncoder() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"missing", "koala");
PasswordEncoder encoder = new BCryptPasswordEncoder();
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false);
provider.setPasswordEncoder(encoder);
MockAuthenticationDaoUserrod userDetailsService = new MockAuthenticationDaoUserrod();
userDetailsService.password = encoder.encode((CharSequence) token
.getCredentials());
provider.setUserDetailsService(userDetailsService);
try {
provider.authenticate(token);
fail("Expected Exception");
}
catch (UsernameNotFoundException success) {
// Expected: only the not-found exception, no encoder error.
}
}
/**
 * SEC-2056 variant with the provider's default encoder and null credentials:
 * a lookup miss must still raise only UsernameNotFoundException.
 */
public void testUserNotFoundDefaultEncoder() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"missing", null);
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false);
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
try {
provider.authenticate(token);
fail("Expected Exception");
}
catch (UsernameNotFoundException success) {
// Expected: only the not-found exception.
}
}
/**
 * This is an explicit test for SEC-2056. It is intentionally ignored since this test
 * is not deterministic and {@link #testUserNotFoundEncodesPassword()} ensures that
 * SEC-2056 is fixed.
 *
 * <p>It compares the average wall-clock time of {@code sampleSize}
 * authentications for an existing user against the same number of attempts for
 * a missing user, and asserts the averages differ by at most 3ms — i.e. a
 * lookup miss is not observably faster than a password mismatch.
 */
public void IGNOREtestSec2056() {
UsernamePasswordAuthenticationToken foundUser = new UsernamePasswordAuthenticationToken(
"rod", "koala");
UsernamePasswordAuthenticationToken notFoundUser = new UsernamePasswordAuthenticationToken(
"notFound", "koala");
PasswordEncoder encoder = new BCryptPasswordEncoder(10, new SecureRandom());
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false);
provider.setPasswordEncoder(encoder);
MockAuthenticationDaoUserrod userDetailsService = new MockAuthenticationDaoUserrod();
userDetailsService.password = encoder.encode((CharSequence) foundUser
.getCredentials());
provider.setUserDetailsService(userDetailsService);
int sampleSize = 100;
// Time successful authentications.
List<Long> userFoundTimes = new ArrayList<Long>(sampleSize);
for (int i = 0; i < sampleSize; i++) {
long start = System.currentTimeMillis();
provider.authenticate(foundUser);
userFoundTimes.add(System.currentTimeMillis() - start);
}
// Time not-found authentications (each must throw).
List<Long> userNotFoundTimes = new ArrayList<Long>(sampleSize);
for (int i = 0; i < sampleSize; i++) {
long start = System.currentTimeMillis();
try {
provider.authenticate(notFoundUser);
fail("Expected Exception");
}
catch (UsernameNotFoundException success) {
// Expected; timing is what this test measures.
}
userNotFoundTimes.add(System.currentTimeMillis() - start);
}
double userFoundAvg = avg(userFoundTimes);
double userNotFoundAvg = avg(userNotFoundTimes);
assertTrue("User not found average " + userNotFoundAvg
+ " should be within 3ms of user found average " + userFoundAvg,
Math.abs(userNotFoundAvg - userFoundAvg) <= 3);
}
/**
 * Returns the arithmetic mean of the supplied samples (milliseconds).
 *
 * <p>Fix: the previous implementation computed {@code sum / counts.size()}
 * with two integral operands, so the division truncated before widening to
 * {@code double}. That discarded the fractional part of the average, which
 * matters to the sub-3ms comparison in {@link #IGNOREtestSec2056()}. Casting
 * one operand to {@code double} first preserves the fraction.
 *
 * @param counts the individual sample durations; must be non-empty
 *               (division by zero otherwise)
 * @return the mean, including any fractional part
 */
private double avg(List<Long> counts) {
long sum = 0;
for (Long time : counts) {
sum += time;
}
return (double) sum / counts.size();
}
/**
 * When the submitted credentials are null and the user is not found, the
 * encoder must never be asked to match (there is nothing to compare).
 */
public void testUserNotFoundNullCredentials() {
UsernamePasswordAuthenticationToken token = new UsernamePasswordAuthenticationToken(
"missing", null);
PasswordEncoder encoder = mock(PasswordEncoder.class);
DaoAuthenticationProvider provider = new DaoAuthenticationProvider();
provider.setHideUserNotFoundExceptions(false);
provider.setPasswordEncoder(encoder);
provider.setUserDetailsService(new MockAuthenticationDaoUserrod());
try {
provider.authenticate(token);
fail("Expected Exception");
}
catch (UsernameNotFoundException success) {
// Expected: hiding disabled, user absent.
}
// The encoder must not have been consulted at all.
verify(encoder, times(0)).matches(anyString(), anyString());
}
// ~ Inner Classes
// ==================================================================================================
/**
 * UserDetailsService stub that always returns null, deliberately violating
 * the interface contract; used to verify the provider reports this as
 * AuthenticationServiceException.
 */
private class MockAuthenticationDaoReturnsNull implements UserDetailsService {
public UserDetails loadUserByUsername(String username) {
return null;
}
}
/**
 * UserDetailsService stub that simulates a data-access layer failure by
 * always throwing DataRetrievalFailureException.
 */
private class MockAuthenticationDaoSimulateBackendError implements UserDetailsService {
public UserDetails loadUserByUsername(String username) {
throw new DataRetrievalFailureException(
"This mock simulator is designed to fail");
}
}
/**
 * UserDetailsService stub that knows a single, fully enabled user "rod".
 * The password is mutable (via {@link #setPassword(String)} or direct field
 * access from the enclosing test) so tests can simulate an out-of-band
 * password change. Any other username raises UsernameNotFoundException.
 */
private class MockAuthenticationDaoUserrod implements UserDetailsService {
// Mutable so the cache-refresh test can change it between authentications.
private String password = "koala";
public UserDetails loadUserByUsername(String username) {
if ("rod".equals(username)) {
return new User("rod", password, true, true, true, true, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
public void setPassword(String password) {
this.password = password;
}
}
/**
 * UserDetailsService stub whose stored password for "rod" already includes
 * the system-wide salt suffix, matching the SystemWideSaltSource configured
 * in {@link #testAuthenticatesWhenASaltIsUsed()}.
 */
private class MockAuthenticationDaoUserrodWithSalt implements UserDetailsService {
public UserDetails loadUserByUsername(String username) {
if ("rod".equals(username)) {
return new User("rod", "koala{SYSTEM_SALT_VALUE}", true, true, true,
true, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
}
/**
 * UserDetailsService stub returning "peter" with the third User constructor
 * flag cleared — per the class usage in testAuthenticateFailsIfUserDisabled,
 * this is the disabled-account case.
 */
private class MockAuthenticationDaoUserPeter implements UserDetailsService {
public UserDetails loadUserByUsername(String username) {
if ("peter".equals(username)) {
return new User("peter", "opal", false, true, true, true, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
}
/**
 * UserDetailsService stub returning "peter" with the fourth User constructor
 * flag cleared — per the class name, the account-expired case.
 */
private class MockAuthenticationDaoUserPeterAccountExpired implements
UserDetailsService {
public UserDetails loadUserByUsername(String username) {
if ("peter".equals(username)) {
return new User("peter", "opal", true, false, true, true, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
}
/**
 * UserDetailsService stub returning "peter" with the sixth User constructor
 * flag cleared — per the class name, the locked-account case.
 */
private class MockAuthenticationDaoUserPeterAccountLocked implements
UserDetailsService {
public UserDetails loadUserByUsername(String username) {
if ("peter".equals(username)) {
return new User("peter", "opal", true, true, true, false, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
}
/**
 * UserDetailsService stub returning "peter" with the fifth User constructor
 * flag cleared — per the class name, the credentials-expired case.
 */
private class MockAuthenticationDaoUserPeterCredentialsExpired implements
UserDetailsService {
public UserDetails loadUserByUsername(String username) {
if ("peter".equals(username)) {
return new User("peter", "opal", true, true, false, true, ROLES_12);
}
else {
throw new UsernameNotFoundException("Could not find: " + username);
}
}
}
}
| |
package org.apache.lucene.search;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.similarities.DefaultSimilarity;
import org.apache.lucene.search.spans.SpanQuery;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.NamedThreadFactory;
import org.apache.lucene.util._TestUtil;
public class TestBooleanQuery extends LuceneTestCase {

  /**
   * Two boolean queries built from identical clause lists (including an
   * identically-built nested query) must compare equal.
   */
  public void testEquality() throws Exception {
    BooleanQuery bq1 = new BooleanQuery();
    bq1.add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.SHOULD);
    bq1.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.SHOULD);
    BooleanQuery nested1 = new BooleanQuery();
    nested1.add(new TermQuery(new Term("field", "nestedvalue1")), BooleanClause.Occur.SHOULD);
    nested1.add(new TermQuery(new Term("field", "nestedvalue2")), BooleanClause.Occur.SHOULD);
    bq1.add(nested1, BooleanClause.Occur.SHOULD);
    BooleanQuery bq2 = new BooleanQuery();
    bq2.add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.SHOULD);
    bq2.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.SHOULD);
    BooleanQuery nested2 = new BooleanQuery();
    nested2.add(new TermQuery(new Term("field", "nestedvalue1")), BooleanClause.Occur.SHOULD);
    nested2.add(new TermQuery(new Term("field", "nestedvalue2")), BooleanClause.Occur.SHOULD);
    bq2.add(nested2, BooleanClause.Occur.SHOULD);
    assertEquals(bq1, bq2);
  }

  /** setMaxClauseCount must reject a non-positive limit. */
  public void testException() {
    try {
      BooleanQuery.setMaxClauseCount(0);
      fail();
    } catch (IllegalArgumentException e) {
      // okay
    }
  }

  // LUCENE-1630: clauses whose scorer is null must not break searching, and a
  // zero-boost clause absent from the index still affects the coord factor.
  public void testNullOrSubScorer() throws Throwable {
    Directory dir = newDirectory();
    RandomIndexWriter w = new RandomIndexWriter(random(), dir);
    Document doc = new Document();
    doc.add(newTextField("field", "a b c d", Field.Store.NO));
    w.addDocument(doc);
    IndexReader r = w.getReader();
    IndexSearcher s = newSearcher(r);
    // this test relies upon coord being the default implementation,
    // otherwise scores are different!
    s.setSimilarity(new DefaultSimilarity());
    BooleanQuery q = new BooleanQuery();
    q.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    // LUCENE-2617: make sure that a term not in the index still contributes to the score via coord factor
    float score = s.search(q, 10).getMaxScore();
    Query subQuery = new TermQuery(new Term("field", "not_in_index"));
    subQuery.setBoost(0);
    q.add(subQuery, BooleanClause.Occur.SHOULD);
    float score2 = s.search(q, 10).getMaxScore();
    assertEquals(score*.5F, score2, 1e-6);
    // LUCENE-2617: make sure that a clause not in the index still contributes to the score via coord factor
    BooleanQuery qq = q.clone();
    PhraseQuery phrase = new PhraseQuery();
    phrase.add(new Term("field", "not_in_index"));
    phrase.add(new Term("field", "another_not_in_index"));
    phrase.setBoost(0);
    qq.add(phrase, BooleanClause.Occur.SHOULD);
    score2 = s.search(qq, 10).getMaxScore();
    assertEquals(score*(1/3F), score2, 1e-6);
    // now test BooleanScorer2
    subQuery = new TermQuery(new Term("field", "b"));
    subQuery.setBoost(0);
    q.add(subQuery, BooleanClause.Occur.MUST);
    score2 = s.search(q, 10).getMaxScore();
    assertEquals(score*(2/3F), score2, 1e-6);
    // PhraseQuery w/ no terms added returns a null scorer
    PhraseQuery pq = new PhraseQuery();
    q.add(pq, BooleanClause.Occur.SHOULD);
    assertEquals(1, s.search(q, 10).totalHits);
    // A required clause which returns null scorer should return null scorer to
    // IndexSearcher.
    q = new BooleanQuery();
    pq = new PhraseQuery();
    q.add(new TermQuery(new Term("field", "a")), BooleanClause.Occur.SHOULD);
    q.add(pq, BooleanClause.Occur.MUST);
    assertEquals(0, s.search(q, 10).totalHits);
    DisjunctionMaxQuery dmq = new DisjunctionMaxQuery(1.0f);
    dmq.add(new TermQuery(new Term("field", "a")));
    dmq.add(pq);
    assertEquals(1, s.search(dmq, 10).totalHits);
    r.close();
    w.close();
    dir.close();
  }

  /**
   * Query "+foo -ba*" over a MultiReader spanning two segments: the prohibited
   * wildcard must exclude matches in every segment, both with a plain searcher
   * and with an executor-backed (multi-threaded) searcher.
   */
  public void testDeMorgan() throws Exception {
    Directory dir1 = newDirectory();
    RandomIndexWriter iw1 = new RandomIndexWriter(random(), dir1);
    Document doc1 = new Document();
    doc1.add(newTextField("field", "foo bar", Field.Store.NO));
    iw1.addDocument(doc1);
    IndexReader reader1 = iw1.getReader();
    iw1.close();
    Directory dir2 = newDirectory();
    RandomIndexWriter iw2 = new RandomIndexWriter(random(), dir2);
    Document doc2 = new Document();
    doc2.add(newTextField("field", "foo baz", Field.Store.NO));
    iw2.addDocument(doc2);
    IndexReader reader2 = iw2.getReader();
    iw2.close();
    BooleanQuery query = new BooleanQuery(); // Query: +foo -ba*
    query.add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
    WildcardQuery wildcardQuery = new WildcardQuery(new Term("field", "ba*"));
    wildcardQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_QUERY_REWRITE);
    query.add(wildcardQuery, BooleanClause.Occur.MUST_NOT);
    MultiReader multireader = new MultiReader(reader1, reader2);
    IndexSearcher searcher = newSearcher(multireader);
    assertEquals(0, searcher.search(query, 10).totalHits);
    final ExecutorService es = Executors.newCachedThreadPool(new NamedThreadFactory("NRT search threads"));
    searcher = new IndexSearcher(multireader, es);
    if (VERBOSE) {
      System.out.println("rewritten form: " + searcher.rewrite(query));
    }
    assertEquals(0, searcher.search(query, 10).totalHits);
    es.shutdown();
    es.awaitTermination(1, TimeUnit.SECONDS);
    multireader.close();
    reader1.close();
    reader2.close();
    dir1.close();
    dir2.close();
  }

  /**
   * Randomized check that a BooleanScorer2 disjunction produces identical
   * docIDs and scores whether driven by nextDoc() alone or by a random mix of
   * nextDoc() and advance().
   */
  public void testBS2DisjunctionNextVsAdvance() throws Exception {
    final Directory d = newDirectory();
    final RandomIndexWriter w = new RandomIndexWriter(random(), d);
    final int numDocs = atLeast(300);
    // Index docs containing "a" plus a random, progressively rarer subset of b..f.
    for(int docUpto=0;docUpto<numDocs;docUpto++) {
      String contents = "a";
      if (random().nextInt(20) <= 16) {
        contents += " b";
      }
      if (random().nextInt(20) <= 8) {
        contents += " c";
      }
      if (random().nextInt(20) <= 4) {
        contents += " d";
      }
      if (random().nextInt(20) <= 2) {
        contents += " e";
      }
      if (random().nextInt(20) <= 1) {
        contents += " f";
      }
      Document doc = new Document();
      doc.add(new TextField("field", contents, Field.Store.NO));
      w.addDocument(doc);
    }
    // Single segment so leafContexts.get(0) covers every doc.
    w.forceMerge(1);
    final IndexReader r = w.getReader();
    final IndexSearcher s = newSearcher(r);
    w.close();
    for(int iter=0;iter<10*RANDOM_MULTIPLIER;iter++) {
      if (VERBOSE) {
        System.out.println("iter=" + iter);
      }
      final List<String> terms = new ArrayList<String>(Arrays.asList("a", "b", "c", "d", "e", "f"));
      final int numTerms = _TestUtil.nextInt(random(), 1, terms.size());
      while(terms.size() > numTerms) {
        terms.remove(random().nextInt(terms.size()));
      }
      if (VERBOSE) {
        System.out.println("  terms=" + terms);
      }
      final BooleanQuery q = new BooleanQuery();
      for(String term : terms) {
        q.add(new BooleanClause(new TermQuery(new Term("field", term)), BooleanClause.Occur.SHOULD));
      }
      Weight weight = s.createNormalizedWeight(q);
      Scorer scorer = weight.scorer(s.leafContexts.get(0),
                                    true, false, null);
      // First pass: just use .nextDoc() to gather all hits
      final List<ScoreDoc> hits = new ArrayList<ScoreDoc>();
      while(scorer.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
        hits.add(new ScoreDoc(scorer.docID(), scorer.score()));
      }
      if (VERBOSE) {
        System.out.println("  " + hits.size() + " hits");
      }
      // Now, randomly next/advance through the list and
      // verify exact match:
      for(int iter2=0;iter2<10;iter2++) {
        weight = s.createNormalizedWeight(q);
        scorer = weight.scorer(s.leafContexts.get(0),
                               true, false, null);
        if (VERBOSE) {
          System.out.println("  iter2=" + iter2);
        }
        int upto = -1;
        while(upto < hits.size()) {
          final int nextUpto;
          final int nextDoc;
          final int left = hits.size() - upto;
          if (left == 1 || random().nextBoolean()) {
            // next
            nextUpto = 1+upto;
            nextDoc = scorer.nextDoc();
          } else {
            // advance
            int inc = _TestUtil.nextInt(random(), 1, left-1);
            nextUpto = inc + upto;
            nextDoc = scorer.advance(hits.get(nextUpto).doc);
          }
          if (nextUpto == hits.size()) {
            assertEquals(DocIdSetIterator.NO_MORE_DOCS, nextDoc);
          } else {
            final ScoreDoc hit = hits.get(nextUpto);
            assertEquals(hit.doc, nextDoc);
            // Test for precise float equality:
            assertTrue("doc " + hit.doc + " has wrong score: expected=" + hit.score + " actual=" + scorer.score(), hit.score == scorer.score());
          }
          upto = nextUpto;
        }
      }
    }
    r.close();
    d.close();
  }

  // LUCENE-4477 / LUCENE-4401: SpanQuery clauses inside a BooleanQuery must
  // still produce hits.
  public void testBooleanSpanQuery() throws Exception {
    // NOTE: 'failed' is never assigned after initialization, so its assertion
    // below can only document intent; 'hits' carries the real check.
    boolean failed = false;
    int hits = 0;
    Directory directory = newDirectory();
    Analyzer indexerAnalyzer = new MockAnalyzer(random());
    IndexWriterConfig config = new IndexWriterConfig(TEST_VERSION_CURRENT, indexerAnalyzer);
    IndexWriter writer = new IndexWriter(directory, config);
    String FIELD = "content";
    Document d = new Document();
    d.add(new TextField(FIELD, "clockwork orange", Field.Store.YES));
    writer.addDocument(d);
    writer.close();
    IndexReader indexReader = DirectoryReader.open(directory);
    IndexSearcher searcher = newSearcher(indexReader);
    BooleanQuery query = new BooleanQuery();
    SpanQuery sq1 = new SpanTermQuery(new Term(FIELD, "clockwork"));
    SpanQuery sq2 = new SpanTermQuery(new Term(FIELD, "clckwork"));
    query.add(sq1, BooleanClause.Occur.SHOULD);
    query.add(sq2, BooleanClause.Occur.SHOULD);
    TopScoreDocCollector collector = TopScoreDocCollector.create(1000, true);
    searcher.search(query, collector);
    hits = collector.topDocs().scoreDocs.length;
    for (ScoreDoc scoreDoc : collector.topDocs().scoreDocs){
      System.out.println(scoreDoc.doc);
    }
    indexReader.close();
    // Fix: assertEquals takes (message, expected, actual); the original had
    // the last two arguments swapped, which would garble any failure message.
    assertEquals("Bug in boolean query composed of span queries", false, failed);
    assertEquals("Bug in boolean query composed of span queries", 1, hits);
    directory.close();
  }
}
| |
/*
* Copyright (c) 2011-2014 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*
* Derived from original file JsonObjectTest.java from Vert.x
*/
package io.mewbase.bson;
import io.mewbase.TestUtils;
import io.mewbase.client.MewException;
import io.vertx.core.buffer.Buffer;
import io.vertx.core.json.JsonObject;
import org.junit.Before;
import org.junit.Test;
import java.math.BigDecimal;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.*;
import java.util.stream.Collectors;
import static java.time.format.DateTimeFormatter.ISO_INSTANT;
import static org.junit.Assert.*;
/**
* @author <a href="http://tfox.org">Tim Fox</a>
*/
public class BsonObjectTest {
protected BsonObject bsonObject;
@Before
public void setUp() throws Exception {
// Start every test with a fresh, empty BsonObject so cases cannot leak state.
bsonObject = new BsonObject();
}
/**
 * getInteger(String): exact round-trip for Integer values, narrowing for
 * other Number types, ClassCastException for non-numeric entries, null for
 * explicit nulls and absent keys, NullPointerException for a null key.
 */
@Test
public void testGetInteger() {
    Integer expected = 123;
    // Exact Integer values come back unchanged.
    bsonObject.put("foo", 123);
    assertEquals(expected, bsonObject.getInteger("foo"));
    // A non-numeric entry must raise ClassCastException.
    bsonObject.put("bar", "hello");
    try {
        bsonObject.getInteger("bar");
        fail();
    } catch (ClassCastException e) {
        // expected
    }
    // Other Number types are narrowed to int.
    bsonObject.put("foo", 123L);
    assertEquals(expected, bsonObject.getInteger("foo"));
    bsonObject.put("foo", 123d);
    assertEquals(expected, bsonObject.getInteger("foo"));
    bsonObject.put("foo", 123f);
    assertEquals(expected, bsonObject.getInteger("foo"));
    // Narrowing Long.MAX_VALUE to int yields -1.
    bsonObject.put("foo", Long.MAX_VALUE);
    assertEquals(Integer.valueOf(-1), bsonObject.getInteger("foo"));
    // Explicit nulls and absent keys both read back as null.
    bsonObject.putNull("foo");
    assertNull(bsonObject.getInteger("foo"));
    assertNull(bsonObject.getInteger("absent"));
    // A null key is rejected.
    try {
        bsonObject.getInteger(null);
        fail();
    } catch (NullPointerException e) {
        // expected
    }
}
/**
 * Same conversions as {@link #testGetInteger()} but through the two-argument
 * overload: the default is returned only for absent keys, while an explicit
 * null entry still yields null.
 */
@Test
public void testGetIntegerDefault() {
bsonObject.put("foo", 123);
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo", 321));
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo", null));
bsonObject.put("bar", "hello");
try {
bsonObject.getInteger("bar", 123);
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123l);
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo", 321));
bsonObject.put("foo", 123d);
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo", 321));
bsonObject.put("foo", 123f);
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo", 321));
// Narrowing Long.MAX_VALUE to int yields -1, ignoring the default.
bsonObject.put("foo", Long.MAX_VALUE);
assertEquals(Integer.valueOf(-1), bsonObject.getInteger("foo", 321));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getInteger("foo", 321));
assertEquals(Integer.valueOf(321), bsonObject.getInteger("absent", 321));
assertNull(bsonObject.getInteger("foo", null));
assertNull(bsonObject.getInteger("absent", null));
try {
bsonObject.getInteger(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
/**
 * getLong(String): exact round-trip for Long values, conversion for other
 * Number types, ClassCastException for non-numeric entries, null for explicit
 * nulls and absent keys, NullPointerException for a null key.
 */
@Test
public void testGetLong() {
    Long expected = 123L;
    // A stored long comes back as the same Long value.
    bsonObject.put("foo", 123L);
    assertEquals(expected, bsonObject.getLong("foo"));
    // A non-numeric entry must raise ClassCastException.
    bsonObject.put("bar", "hello");
    try {
        bsonObject.getLong("bar");
        fail();
    } catch (ClassCastException e) {
        // expected
    }
    // Other Number types are converted to long.
    bsonObject.put("foo", 123);
    assertEquals(expected, bsonObject.getLong("foo"));
    bsonObject.put("foo", 123d);
    assertEquals(expected, bsonObject.getLong("foo"));
    bsonObject.put("foo", 123f);
    assertEquals(expected, bsonObject.getLong("foo"));
    // Long.MAX_VALUE survives the round-trip unchanged.
    bsonObject.put("foo", Long.MAX_VALUE);
    assertEquals(Long.valueOf(Long.MAX_VALUE), bsonObject.getLong("foo"));
    // Explicit nulls and absent keys both read back as null.
    bsonObject.putNull("foo");
    assertNull(bsonObject.getLong("foo"));
    assertNull(bsonObject.getLong("absent"));
    // A null key is rejected.
    try {
        bsonObject.getLong(null);
        fail();
    } catch (NullPointerException e) {
        // expected
    }
}
@Test
public void testGetLongDefault() {
bsonObject.put("foo", 123l);
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo", 321l));
assertEquals(Long.valueOf(123), bsonObject.getLong("foo", null));
bsonObject.put("bar", "hello");
try {
bsonObject.getLong("bar", 123l);
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123);
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo", 321l));
bsonObject.put("foo", 123d);
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo", 321l));
bsonObject.put("foo", 123f);
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo", 321l));
bsonObject.put("foo", Long.MAX_VALUE);
assertEquals(Long.valueOf(Long.MAX_VALUE), bsonObject.getLong("foo", 321l));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getLong("foo", 321l));
assertEquals(Long.valueOf(321l), bsonObject.getLong("absent", 321l));
assertNull(bsonObject.getLong("foo", null));
assertNull(bsonObject.getLong("absent", null));
try {
bsonObject.getLong(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetFloat() {
bsonObject.put("foo", 123f);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo"));
bsonObject.put("bar", "hello");
try {
bsonObject.getFloat("bar");
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo"));
bsonObject.put("foo", 123d);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo"));
bsonObject.put("foo", 123f);
assertEquals(Float.valueOf(123l), bsonObject.getFloat("foo"));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getFloat("foo"));
assertNull(bsonObject.getFloat("absent"));
try {
bsonObject.getFloat(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetFloatDefault() {
bsonObject.put("foo", 123f);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo", 321f));
assertEquals(Float.valueOf(123), bsonObject.getFloat("foo", null));
bsonObject.put("bar", "hello");
try {
bsonObject.getFloat("bar", 123f);
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo", 321f));
bsonObject.put("foo", 123d);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo", 321f));
bsonObject.put("foo", 123l);
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo", 321f));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getFloat("foo", 321f));
assertEquals(Float.valueOf(321f), bsonObject.getFloat("absent", 321f));
assertNull(bsonObject.getFloat("foo", null));
assertNull(bsonObject.getFloat("absent", null));
try {
bsonObject.getFloat(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetDouble() {
bsonObject.put("foo", 123d);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
bsonObject.put("bar", "hello");
try {
bsonObject.getDouble("bar");
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
bsonObject.put("foo", 123l);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
bsonObject.put("foo", 123f);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getDouble("foo"));
assertNull(bsonObject.getDouble("absent"));
try {
bsonObject.getDouble(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetDoubleDefault() {
bsonObject.put("foo", 123d);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo", 321d));
assertEquals(Double.valueOf(123), bsonObject.getDouble("foo", null));
bsonObject.put("bar", "hello");
try {
bsonObject.getDouble("bar", 123d);
fail();
} catch (ClassCastException e) {
// Ok
}
// Put as different Number types
bsonObject.put("foo", 123);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo", 321d));
bsonObject.put("foo", 123f);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo", 321d));
bsonObject.put("foo", 123l);
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo", 321d));
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getDouble("foo", 321d));
assertEquals(Double.valueOf(321d), bsonObject.getDouble("absent", 321d));
assertNull(bsonObject.getDouble("foo", null));
assertNull(bsonObject.getDouble("absent", null));
try {
bsonObject.getDouble(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetString() {
bsonObject.put("foo", "bar");
assertEquals("bar", bsonObject.getString("foo"));
bsonObject.put("bar", 123);
try {
bsonObject.getString("bar");
fail();
} catch (ClassCastException e) {
// Ok
}
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getString("foo"));
assertNull(bsonObject.getString("absent"));
try {
bsonObject.getString(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetStringDefault() {
bsonObject.put("foo", "bar");
assertEquals("bar", bsonObject.getString("foo", "wibble"));
assertEquals("bar", bsonObject.getString("foo", null));
bsonObject.put("bar", 123);
try {
bsonObject.getString("bar", "wibble");
fail();
} catch (ClassCastException e) {
// Ok
}
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getString("foo", "wibble"));
assertEquals("wibble", bsonObject.getString("absent", "wibble"));
assertNull(bsonObject.getString("foo", null));
assertNull(bsonObject.getString("absent", null));
try {
bsonObject.getString(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBoolean() {
bsonObject.put("foo", true);
assertEquals(true, bsonObject.getBoolean("foo"));
bsonObject.put("foo", false);
assertEquals(false, bsonObject.getBoolean("foo"));
bsonObject.put("bar", 123);
try {
bsonObject.getBoolean("bar");
fail();
} catch (ClassCastException e) {
// Ok
}
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getBoolean("foo"));
assertNull(bsonObject.getBoolean("absent"));
try {
bsonObject.getBoolean(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBooleanDefault() {
bsonObject.put("foo", true);
assertEquals(true, bsonObject.getBoolean("foo", false));
assertEquals(true, bsonObject.getBoolean("foo", null));
bsonObject.put("foo", false);
assertEquals(false, bsonObject.getBoolean("foo", true));
assertEquals(false, bsonObject.getBoolean("foo", null));
bsonObject.put("bar", 123);
try {
bsonObject.getBoolean("bar", true);
fail();
} catch (ClassCastException e) {
// Ok
}
// Null and absent values
bsonObject.putNull("foo");
assertNull(bsonObject.getBoolean("foo", true));
assertNull(bsonObject.getBoolean("foo", false));
assertEquals(true, bsonObject.getBoolean("absent", true));
assertEquals(false, bsonObject.getBoolean("absent", false));
try {
bsonObject.getBoolean(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBinary() {
byte[] bytes = TestUtils.randomByteArray(100);
bsonObject.put("foo", bytes);
assertTrue(TestUtils.byteArraysEqual(bytes, bsonObject.getBinary("foo")));
// Can also get as string:
String val = bsonObject.getString("foo");
assertNotNull(val);
byte[] retrieved = Base64.getDecoder().decode(val);
assertTrue(TestUtils.byteArraysEqual(bytes, retrieved));
bsonObject.put("foo", 123);
try {
bsonObject.getBinary("foo");
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBinary("foo"));
assertNull(bsonObject.getBinary("absent"));
try {
bsonObject.getBinary(null);
fail();
} catch (NullPointerException e) {
// OK
}
try {
bsonObject.getBinary(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetInstant() {
Instant now = Instant.now();
bsonObject.put("foo", now);
assertEquals(now, bsonObject.getInstant("foo"));
// Can also get as string:
String val = bsonObject.getString("foo");
assertNotNull(val);
Instant retrieved = Instant.from(ISO_INSTANT.parse(val));
assertEquals(now, retrieved);
bsonObject.put("foo", 123);
try {
bsonObject.getInstant("foo");
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getInstant("foo"));
assertNull(bsonObject.getInstant("absent"));
try {
bsonObject.getInstant(null);
fail();
} catch (NullPointerException e) {
// OK
}
try {
bsonObject.getInstant(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBinaryDefault() {
byte[] bytes = TestUtils.randomByteArray(100);
byte[] defBytes = TestUtils.randomByteArray(100);
bsonObject.put("foo", bytes);
assertTrue(TestUtils.byteArraysEqual(bytes, bsonObject.getBinary("foo", defBytes)));
assertTrue(TestUtils.byteArraysEqual(bytes, bsonObject.getBinary("foo", null)));
bsonObject.put("foo", 123);
try {
bsonObject.getBinary("foo", defBytes);
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBinary("foo", defBytes));
assertTrue(TestUtils.byteArraysEqual(defBytes, bsonObject.getBinary("absent", defBytes)));
assertNull(bsonObject.getBinary("foo", null));
assertNull(bsonObject.getBinary("absent", null));
try {
bsonObject.getBinary(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetInstantDefault() {
Instant now = Instant.now();
Instant later = now.plus(1, ChronoUnit.DAYS);
bsonObject.put("foo", now);
assertEquals(now, bsonObject.getInstant("foo", later));
assertEquals(now, bsonObject.getInstant("foo", null));
bsonObject.put("foo", 123);
try {
bsonObject.getInstant("foo", later);
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getInstant("foo", later));
assertEquals(later, bsonObject.getInstant("absent", later));
assertNull(bsonObject.getInstant("foo", null));
assertNull(bsonObject.getInstant("absent", null));
try {
bsonObject.getInstant(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBsonObject() {
BsonObject obj = new BsonObject().put("blah", "wibble");
bsonObject.put("foo", obj);
assertEquals(obj, bsonObject.getBsonObject("foo"));
bsonObject.put("foo", "hello");
try {
bsonObject.getBsonObject("foo");
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBsonObject("foo"));
assertNull(bsonObject.getBsonObject("absent"));
try {
bsonObject.getBsonObject(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBsonObjectDefault() {
BsonObject obj = new BsonObject().put("blah", "wibble");
BsonObject def = new BsonObject().put("eek", "quuz");
bsonObject.put("foo", obj);
assertEquals(obj, bsonObject.getBsonObject("foo", def));
assertEquals(obj, bsonObject.getBsonObject("foo", null));
bsonObject.put("foo", "hello");
try {
bsonObject.getBsonObject("foo", def);
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBsonObject("foo", def));
assertEquals(def, bsonObject.getBsonObject("absent", def));
assertNull(bsonObject.getBsonObject("foo", null));
assertNull(bsonObject.getBsonObject("absent", null));
try {
bsonObject.getBsonObject(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBsonArray() {
BsonArray arr = new BsonArray().add("blah").add("wibble");
bsonObject.put("foo", arr);
assertEquals(arr, bsonObject.getBsonArray("foo"));
bsonObject.put("foo", "hello");
try {
bsonObject.getBsonArray("foo");
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBsonArray("foo"));
assertNull(bsonObject.getBsonArray("absent"));
try {
bsonObject.getBsonArray(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testGetBsonArrayDefault() {
BsonArray arr = new BsonArray().add("blah").add("wibble");
BsonArray def = new BsonArray().add("quux").add("eek");
bsonObject.put("foo", arr);
assertEquals(arr, bsonObject.getBsonArray("foo", def));
assertEquals(arr, bsonObject.getBsonArray("foo", null));
bsonObject.put("foo", "hello");
try {
bsonObject.getBsonArray("foo", def);
fail();
} catch (ClassCastException e) {
// Ok
}
bsonObject.putNull("foo");
assertNull(bsonObject.getBsonArray("foo", def));
assertEquals(def, bsonObject.getBsonArray("absent", def));
assertNull(bsonObject.getBsonArray("foo", null));
assertNull(bsonObject.getBsonArray("absent", null));
try {
bsonObject.getBsonArray(null, null);
fail();
} catch (NullPointerException e) {
// OK
}
}
    @Test
    public void testGetValue() {
        // getValue returns the raw stored value for every supported type,
        // without the type-specific conversion the typed getters perform.
        bsonObject.put("foo", 123);
        assertEquals(123, bsonObject.getValue("foo"));
        bsonObject.put("foo", 123l);
        assertEquals(123l, bsonObject.getValue("foo"));
        bsonObject.put("foo", 123f);
        assertEquals(123f, bsonObject.getValue("foo"));
        bsonObject.put("foo", 123d);
        assertEquals(123d, bsonObject.getValue("foo"));
        bsonObject.put("foo", false);
        assertEquals(false, bsonObject.getValue("foo"));
        bsonObject.put("foo", true);
        assertEquals(true, bsonObject.getValue("foo"));
        bsonObject.put("foo", "bar");
        assertEquals("bar", bsonObject.getValue("foo"));
        BsonObject obj = new BsonObject().put("blah", "wibble");
        bsonObject.put("foo", obj);
        assertEquals(obj, bsonObject.getValue("foo"));
        BsonArray arr = new BsonArray().add("blah").add("wibble");
        bsonObject.put("foo", arr);
        assertEquals(arr, bsonObject.getValue("foo"));
        // Binary comes back as its base64 String form via getValue
        byte[] bytes = TestUtils.randomByteArray(100);
        bsonObject.put("foo", bytes);
        assertTrue(TestUtils.byteArraysEqual(bytes, Base64.getDecoder().decode((String)bsonObject.getValue("foo"))));
        bsonObject.putNull("foo");
        assertNull(bsonObject.getValue("foo"));
        assertNull(bsonObject.getValue("absent"));
        // BsonObject with inner Map: inner maps are surfaced as BsonObject
        Map<String, Object> map = new HashMap<>();
        Map<String, Object> innerMap = new HashMap<>();
        innerMap.put("blah", "wibble");
        map.put("foo", innerMap);
        bsonObject = new BsonObject(map);
        obj = (BsonObject)bsonObject.getValue("foo");
        assertEquals("wibble", obj.getString("blah"));
        // BsonObject with inner List: inner lists are surfaced as BsonArray
        map = new HashMap<>();
        List<Object> innerList = new ArrayList<>();
        innerList.add("blah");
        map.put("foo", innerList);
        bsonObject = new BsonObject(map);
        arr = (BsonArray)bsonObject.getValue("foo");
        assertEquals("blah", arr.getString(0));
    }
    @Test
    public void testGetValueDefault() {
        // getValue(key, def) for every supported type: a present key always
        // wins over the default, including when the default is null.
        bsonObject.put("foo", 123);
        assertEquals(123, bsonObject.getValue("foo", "blah"));
        assertEquals(123, bsonObject.getValue("foo", null));
        bsonObject.put("foo", 123l);
        assertEquals(123l, bsonObject.getValue("foo", "blah"));
        assertEquals(123l, bsonObject.getValue("foo", null));
        bsonObject.put("foo", 123f);
        assertEquals(123f, bsonObject.getValue("foo", "blah"));
        assertEquals(123f, bsonObject.getValue("foo", null));
        bsonObject.put("foo", 123d);
        assertEquals(123d, bsonObject.getValue("foo", "blah"));
        assertEquals(123d, bsonObject.getValue("foo", null));
        bsonObject.put("foo", false);
        assertEquals(false, bsonObject.getValue("foo", "blah"));
        assertEquals(false, bsonObject.getValue("foo", null));
        bsonObject.put("foo", true);
        assertEquals(true, bsonObject.getValue("foo", "blah"));
        assertEquals(true, bsonObject.getValue("foo", null));
        bsonObject.put("foo", "bar");
        assertEquals("bar", bsonObject.getValue("foo", "blah"));
        assertEquals("bar", bsonObject.getValue("foo", null));
        BsonObject obj = new BsonObject().put("blah", "wibble");
        bsonObject.put("foo", obj);
        assertEquals(obj, bsonObject.getValue("foo", "blah"));
        assertEquals(obj, bsonObject.getValue("foo", null));
        BsonArray arr = new BsonArray().add("blah").add("wibble");
        bsonObject.put("foo", arr);
        assertEquals(arr, bsonObject.getValue("foo", "blah"));
        assertEquals(arr, bsonObject.getValue("foo", null));
        // Binary comes back as its base64 String form via getValue
        byte[] bytes = TestUtils.randomByteArray(100);
        bsonObject.put("foo", bytes);
        assertTrue(TestUtils.byteArraysEqual(bytes, Base64.getDecoder().decode((String)bsonObject.getValue("foo", "blah"))));
        assertTrue(TestUtils.byteArraysEqual(bytes, Base64.getDecoder().decode((String)bsonObject.getValue("foo", null))));
        // Explicit null returns null; only an absent key uses the default
        bsonObject.putNull("foo");
        assertNull(bsonObject.getValue("foo", "blah"));
        assertNull(bsonObject.getValue("foo", null));
        assertEquals("blah", bsonObject.getValue("absent", "blah"));
        assertNull(bsonObject.getValue("absent", null));
    }
@Test
public void testContainsKey() {
bsonObject.put("foo", "bar");
assertTrue(bsonObject.containsKey("foo"));
bsonObject.putNull("foo");
assertTrue(bsonObject.containsKey("foo"));
assertFalse(bsonObject.containsKey("absent"));
}
@Test
public void testFieldNames() {
bsonObject.put("foo", "bar");
bsonObject.put("eek", 123);
bsonObject.put("flib", new BsonObject());
Set<String> fieldNames = bsonObject.fieldNames();
assertEquals(3, fieldNames.size());
assertTrue(fieldNames.contains("foo"));
assertTrue(fieldNames.contains("eek"));
assertTrue(fieldNames.contains("flib"));
bsonObject.remove("foo");
assertEquals(2, fieldNames.size());
assertFalse(fieldNames.contains("foo"));
}
@Test
public void testSize() {
assertEquals(0, bsonObject.size());
bsonObject.put("foo", "bar");
assertEquals(1, bsonObject.size());
bsonObject.put("bar", 123);
assertEquals(2, bsonObject.size());
bsonObject.putNull("wibble");
assertEquals(3, bsonObject.size());
bsonObject.remove("wibble");
assertEquals(2, bsonObject.size());
bsonObject.clear();
assertEquals(0, bsonObject.size());
}
enum SomeEnum {
FOO, BAR
}
@Test
public void testPutEnum() {
assertSame(bsonObject, bsonObject.put("foo", SomeEnum.FOO));
assertEquals(SomeEnum.FOO.toString(), bsonObject.getString("foo"));
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, SomeEnum.FOO);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutString() {
assertSame(bsonObject, bsonObject.put("foo", "bar"));
assertEquals("bar", bsonObject.getString("foo"));
bsonObject.put("quux", "wibble");
assertEquals("wibble", bsonObject.getString("quux"));
assertEquals("bar", bsonObject.getString("foo"));
bsonObject.put("foo", "blah");
assertEquals("blah", bsonObject.getString("foo"));
bsonObject.put("foo", (String)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, "blah");
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutCharSequence() {
assertSame(bsonObject, bsonObject.put("foo", new StringBuilder("bar")));
assertEquals("bar", bsonObject.getString("foo"));
assertEquals("bar", bsonObject.getString("foo", "def"));
bsonObject.put("quux", new StringBuilder("wibble"));
assertEquals("wibble", bsonObject.getString("quux"));
assertEquals("bar", bsonObject.getString("foo"));
bsonObject.put("foo", new StringBuilder("blah"));
assertEquals("blah", bsonObject.getString("foo"));
bsonObject.put("foo", (CharSequence)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, (CharSequence)"blah");
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutInteger() {
assertSame(bsonObject, bsonObject.put("foo", 123));
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo"));
bsonObject.put("quux", 321);
assertEquals(Integer.valueOf(321), bsonObject.getInteger("quux"));
assertEquals(Integer.valueOf(123), bsonObject.getInteger("foo"));
bsonObject.put("foo", 456);
assertEquals(Integer.valueOf(456), bsonObject.getInteger("foo"));
bsonObject.put("foo", (Integer)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, 123);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutLong() {
assertSame(bsonObject, bsonObject.put("foo", 123l));
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo"));
bsonObject.put("quux", 321l);
assertEquals(Long.valueOf(321l), bsonObject.getLong("quux"));
assertEquals(Long.valueOf(123l), bsonObject.getLong("foo"));
bsonObject.put("foo", 456l);
assertEquals(Long.valueOf(456l), bsonObject.getLong("foo"));
bsonObject.put("foo", (Long)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, 123l);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutFloat() {
assertSame(bsonObject, bsonObject.put("foo", 123f));
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo"));
bsonObject.put("quux", 321f);
assertEquals(Float.valueOf(321f), bsonObject.getFloat("quux"));
assertEquals(Float.valueOf(123f), bsonObject.getFloat("foo"));
bsonObject.put("foo", 456f);
assertEquals(Float.valueOf(456f), bsonObject.getFloat("foo"));
bsonObject.put("foo", (Float)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, 1.2f);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutDouble() {
assertSame(bsonObject, bsonObject.put("foo", 123d));
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
bsonObject.put("quux", 321d);
assertEquals(Double.valueOf(321d), bsonObject.getDouble("quux"));
assertEquals(Double.valueOf(123d), bsonObject.getDouble("foo"));
bsonObject.put("foo", 456d);
assertEquals(Double.valueOf(456d), bsonObject.getDouble("foo"));
bsonObject.put("foo", (Double)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, 1.23d);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutBoolean() {
assertSame(bsonObject, bsonObject.put("foo", true));
assertEquals(true, bsonObject.getBoolean("foo"));
bsonObject.put("quux", true);
assertEquals(true, bsonObject.getBoolean("quux"));
assertEquals(true, bsonObject.getBoolean("foo"));
bsonObject.put("foo", true);
assertEquals(true, bsonObject.getBoolean("foo"));
bsonObject.put("foo", (Boolean)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, false);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutBsonObject() {
BsonObject obj1 = new BsonObject().put("blah", "wibble");
BsonObject obj2 = new BsonObject().put("eeek", "flibb");
BsonObject obj3 = new BsonObject().put("floob", "plarp");
assertSame(bsonObject, bsonObject.put("foo", obj1));
assertEquals(obj1, bsonObject.getBsonObject("foo"));
bsonObject.put("quux", obj2);
assertEquals(obj2, bsonObject.getBsonObject("quux"));
assertEquals(obj1, bsonObject.getBsonObject("foo"));
bsonObject.put("foo", obj3);
assertEquals(obj3, bsonObject.getBsonObject("foo"));
bsonObject.put("foo", (BsonObject)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, new BsonObject());
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutBsonArray() {
BsonArray obj1 = new BsonArray().add("parp");
BsonArray obj2 = new BsonArray().add("fleep");
BsonArray obj3 = new BsonArray().add("woob");
assertSame(bsonObject, bsonObject.put("foo", obj1));
assertEquals(obj1, bsonObject.getBsonArray("foo"));
bsonObject.put("quux", obj2);
assertEquals(obj2, bsonObject.getBsonArray("quux"));
assertEquals(obj1, bsonObject.getBsonArray("foo"));
bsonObject.put("foo", obj3);
assertEquals(obj3, bsonObject.getBsonArray("foo"));
bsonObject.put("foo", (BsonArray)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, new BsonArray());
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutBinary() {
byte[] bin1 = TestUtils.randomByteArray(100);
byte[] bin2 = TestUtils.randomByteArray(100);
byte[] bin3 = TestUtils.randomByteArray(100);
assertSame(bsonObject, bsonObject.put("foo", bin1));
assertTrue(TestUtils.byteArraysEqual(bin1, bsonObject.getBinary("foo")));
bsonObject.put("quux", bin2);
assertTrue(TestUtils.byteArraysEqual(bin2, bsonObject.getBinary("quux")));
assertTrue(TestUtils.byteArraysEqual(bin1, bsonObject.getBinary("foo")));
bsonObject.put("foo", bin3);
assertTrue(TestUtils.byteArraysEqual(bin3, bsonObject.getBinary("foo")));
bsonObject.put("foo", (byte[])null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, bin1);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutInstant() {
Instant bin1 = Instant.now();
Instant bin2 = bin1.plus(1, ChronoUnit.DAYS);
Instant bin3 = bin1.plus(1, ChronoUnit.MINUTES);
assertSame(bsonObject, bsonObject.put("foo", bin1));
assertEquals(bin1, bsonObject.getInstant("foo"));
bsonObject.put("quux", bin2);
assertEquals(bin2, bsonObject.getInstant("quux"));
assertEquals(bin1, bsonObject.getInstant("foo"));
bsonObject.put("foo", bin3);
assertEquals(bin3, bsonObject.getInstant("foo"));
bsonObject.put("foo", (Instant)null);
assertTrue(bsonObject.containsKey("foo"));
try {
bsonObject.put(null, bin1);
fail();
} catch (NullPointerException e) {
// OK
}
}
@Test
public void testPutNull() {
assertSame(bsonObject, bsonObject.putNull("foo"));
assertTrue(bsonObject.containsKey("foo"));
assertSame(bsonObject, bsonObject.putNull("bar"));
assertTrue(bsonObject.containsKey("bar"));
try {
bsonObject.putNull(null);
fail();
} catch (NullPointerException e) {
// OK
}
}
    @Test
    public void testPutValue() {
        // put(String, Object) dispatches on runtime type for every supported
        // value kind, and rejects unsupported types with IllegalStateException.
        bsonObject.put("str", (Object)"bar");
        bsonObject.put("int", (Object)(Integer.valueOf(123)));
        bsonObject.put("long", (Object)(Long.valueOf(123l)));
        bsonObject.put("float", (Object)(Float.valueOf(1.23f)));
        bsonObject.put("double", (Object)(Double.valueOf(1.23d)));
        bsonObject.put("boolean", (Object)true);
        byte[] bytes = TestUtils.randomByteArray(10);
        bsonObject.put("binary", (Object)(bytes));
        Instant now = Instant.now();
        bsonObject.put("instant", now);
        BsonObject obj = new BsonObject().put("foo", "blah");
        BsonArray arr = new BsonArray().add("quux");
        bsonObject.put("obj", (Object)obj);
        bsonObject.put("arr", (Object)arr);
        // Each value is retrievable via its typed getter
        assertEquals("bar", bsonObject.getString("str"));
        assertEquals(Integer.valueOf(123), bsonObject.getInteger("int"));
        assertEquals(Long.valueOf(123l), bsonObject.getLong("long"));
        assertEquals(Float.valueOf(1.23f), bsonObject.getFloat("float"));
        assertEquals(Double.valueOf(1.23d), bsonObject.getDouble("double"));
        assertTrue(TestUtils.byteArraysEqual(bytes, bsonObject.getBinary("binary")));
        assertEquals(now, bsonObject.getInstant("instant"));
        assertEquals(obj, bsonObject.getBsonObject("obj"));
        assertEquals(arr, bsonObject.getBsonArray("arr"));
        // Unsupported value types are rejected
        try {
            bsonObject.put("inv", new SomeClass());
            fail();
        } catch (IllegalStateException e) {
            // OK
        }
        try {
            bsonObject.put("inv", new BigDecimal(123));
            fail();
        } catch (IllegalStateException e) {
            // OK
        }
        try {
            bsonObject.put("inv", new Date());
            fail();
        } catch (IllegalStateException e) {
            // OK
        }
    }
@Test
public void testMergeIn1() {
BsonObject obj1 = new BsonObject().put("foo", "bar");
BsonObject obj2 = new BsonObject().put("eek", "flurb");
obj1.mergeIn(obj2);
assertEquals(2, obj1.size());
assertEquals("bar", obj1.getString("foo"));
assertEquals("flurb", obj1.getString("eek"));
assertEquals(1, obj2.size());
assertEquals("flurb", obj2.getString("eek"));
}
@Test
public void testMergeIn2() {
BsonObject obj1 = new BsonObject().put("foo", "bar");
BsonObject obj2 = new BsonObject().put("foo", "flurb");
obj1.mergeIn(obj2);
assertEquals(1, obj1.size());
assertEquals("flurb", obj1.getString("foo"));
assertEquals(1, obj2.size());
assertEquals("flurb", obj2.getString("foo"));
}
    @Test
    public void testEncode() throws Exception {
        // Round-trip: encode a fully-populated object to a Buffer and decode it
        // back, verifying every supported value type survives intact.
        bsonObject.put("mystr", "foo");
        bsonObject.put("mycharsequence", new StringBuilder("oob"));
        bsonObject.put("myint", 123);
        bsonObject.put("mylong", 1234l);
        bsonObject.put("myfloat", 1.23f);
        bsonObject.put("mydouble", 2.34d);
        bsonObject.put("myboolean", true);
        byte[] bytes = TestUtils.randomByteArray(10);
        bsonObject.put("mybinary", bytes);
        Instant now = Instant.now();
        bsonObject.put("myinstant", now);
        bsonObject.putNull("mynull");
        bsonObject.put("myobj", new BsonObject().put("foo", "bar"));
        bsonObject.put("myarr", new BsonArray().add("foo").add(123));
        Buffer encoded = bsonObject.encode();
        BsonObject obj = new BsonObject(encoded);
        assertEquals("foo", obj.getString("mystr"));
        // CharSequence values decode as their string content
        assertEquals("oob", obj.getString("mycharsequence"));
        assertEquals(Integer.valueOf(123), obj.getInteger("myint"));
        assertEquals(Long.valueOf(1234), obj.getLong("mylong"));
        assertEquals(Float.valueOf(1.23f), obj.getFloat("myfloat"));
        assertEquals(Double.valueOf(2.34d), obj.getDouble("mydouble"));
        assertTrue(obj.getBoolean("myboolean"));
        assertTrue(TestUtils.byteArraysEqual(bytes, obj.getBinary("mybinary")));
        assertEquals(now, obj.getInstant("myinstant"));
        // Explicit nulls survive the round trip as present keys
        assertTrue(obj.containsKey("mynull"));
        BsonObject nestedObj = obj.getBsonObject("myobj");
        assertEquals("bar", nestedObj.getString("foo"));
        BsonArray nestedArr = obj.getBsonArray("myarr");
        assertEquals("foo", nestedArr.getString(0));
        assertEquals(Integer.valueOf(123), Integer.valueOf(nestedArr.getInteger(1)));
    }
    @Test
    public void testEncodeToString() throws Exception {
        // Pins the exact JSON string form of a fully-populated object, including
        // base64 binary and ISO-8601 instant encodings.
        bsonObject.put("mystr", "foo");
        bsonObject.put("mycharsequence", new StringBuilder("oob"));
        bsonObject.put("myint", 123);
        bsonObject.put("mylong", 1234l);
        bsonObject.put("myfloat", 1.23f);
        bsonObject.put("mydouble", 2.34d);
        bsonObject.put("myboolean", true);
        // Fixed bytes and instant so the expected string below is deterministic
        byte[] bytes = new byte[] {4, 7, 89, 32, 24};
        bsonObject.put("mybinary", bytes);
        Instant now = Instant.ofEpochMilli(16235126312635L);
        bsonObject.put("myinstant", now);
        bsonObject.putNull("mynull");
        bsonObject.put("myobj", new BsonObject().put("foo", "bar"));
        bsonObject.put("myarr", new BsonArray().add("foo").add(123));
        String str = bsonObject.encodeToString();
        // NOTE(review): the key order asserted here implies a specific iteration
        // order of the underlying map — confirm this is stable across JVM versions.
        assertEquals("{\"myboolean\":true,\"myfloat\":1.23,\"myobj\":{\"foo\":\"bar\"},\"mylong\":1234,\"mydou" +
                "ble\":2.34,\"mycharsequence\":\"oob\",\"mybinary\":\"BAdZIBg=\",\"myinstant\":\"2484-06-20T13:18:32.63" +
                "5Z\",\"myint\":123,\"mystr\":\"foo\",\"myarr\":[\"foo\",123],\"mynull\":null}", str);
    }
@Test
public void testEncodeSize() throws Exception {
bsonObject.put("foo", "bar");
Buffer encoded = bsonObject.encode();
int length = encoded.getIntLE(0);
assertEquals(encoded.length(), length);
}
@Test
public void testInvalidJson() {
Buffer invalid = Buffer.buffer(TestUtils.randomByteArray(100));
try {
new BsonObject(invalid);
fail();
} catch (MewException e) {
// OK
}
}
@Test
public void testClear() {
bsonObject.put("foo", "bar");
bsonObject.put("quux", 123);
assertEquals(2, bsonObject.size());
bsonObject.clear();
assertEquals(0, bsonObject.size());
assertNull(bsonObject.getValue("foo"));
assertNull(bsonObject.getValue("quux"));
}
@Test
public void testIsEmpty() {
assertTrue(bsonObject.isEmpty());
bsonObject.put("foo", "bar");
bsonObject.put("quux", 123);
assertFalse(bsonObject.isEmpty());
bsonObject.clear();
assertTrue(bsonObject.isEmpty());
}
@Test
public void testRemove() {
bsonObject.put("mystr", "bar");
bsonObject.put("myint", 123);
assertEquals("bar", bsonObject.remove("mystr"));
assertNull(bsonObject.getValue("mystr"));
assertEquals(123, bsonObject.remove("myint"));
assertNull(bsonObject.getValue("myint"));
assertTrue(bsonObject.isEmpty());
}
@Test
public void testIterator() {
bsonObject.put("foo", "bar");
bsonObject.put("quux", 123);
BsonObject obj = createBsonObject();
bsonObject.put("wibble", obj);
Iterator<Map.Entry<String, Object>> iter = bsonObject.iterator();
assertTrue(iter.hasNext());
Map.Entry<String, Object> entry = iter.next();
assertEquals("foo", entry.getKey());
assertEquals("bar", entry.getValue());
assertTrue(iter.hasNext());
entry = iter.next();
assertEquals("quux", entry.getKey());
assertEquals(123, entry.getValue());
assertTrue(iter.hasNext());
entry = iter.next();
assertEquals("wibble", entry.getKey());
assertEquals(obj, entry.getValue());
assertFalse(iter.hasNext());
iter.remove();
assertFalse(obj.containsKey("wibble"));
assertEquals(2, bsonObject.size());
}
    @Test
    public void testIteratorDoesntChangeObject() {
        // Iterating wraps nested Maps/Lists as BsonObject/BsonArray on the fly,
        // without mutating the underlying map the object was built from.
        Map<String, Object> map = new LinkedHashMap<>();
        map.put("nestedMap", new HashMap<>());
        map.put("nestedList", new ArrayList<>());
        BsonObject obj = new BsonObject(map);
        Iterator<Map.Entry<String, Object>> iter = obj.iterator();
        Map.Entry<String, Object> entry1 = iter.next();
        assertEquals("nestedMap", entry1.getKey());
        Object val1 = entry1.getValue();
        assertTrue(val1 instanceof BsonObject);
        Map.Entry<String, Object> entry2 = iter.next();
        assertEquals("nestedList", entry2.getKey());
        Object val2 = entry2.getValue();
        assertTrue(val2 instanceof BsonArray);
        // The backing map still holds the raw collection types
        assertTrue(map.get("nestedMap") instanceof HashMap);
        assertTrue(map.get("nestedList") instanceof ArrayList);
    }
/**
 * stream() yields the entries in insertion order.
 */
@Test
public void testStream() {
    bsonObject.put("foo", "bar");
    bsonObject.put("quux", 123);
    BsonObject nested = createBsonObject();
    bsonObject.put("wibble", nested);
    List<Map.Entry<String, Object>> entries = bsonObject.stream().collect(Collectors.toList());
    assertEquals(3, entries.size());
    assertEquals("foo", entries.get(0).getKey());
    assertEquals("bar", entries.get(0).getValue());
    assertEquals("quux", entries.get(1).getKey());
    assertEquals(123, entries.get(1).getValue());
    assertEquals("wibble", entries.get(2).getKey());
    assertEquals(nested, entries.get(2).getValue());
}
/**
 * copy() produces a deep, independent clone: mutations on either side are
 * invisible to the other, nested objects are copied (not aliased), and
 * CharSequence values are normalised to String.
 */
@Test
public void testCopy() {
    bsonObject.put("foo", "bar");
    bsonObject.put("quux", 123);
    BsonObject obj = createBsonObject();
    bsonObject.put("wibble", obj);
    bsonObject.put("eek", new StringBuilder("blah")); // CharSequence
    BsonObject copy = bsonObject.copy();
    assertNotSame(bsonObject, copy);
    assertEquals(bsonObject, copy);
    // Mutating the copy must not affect the original...
    copy.put("blah", "flib");
    assertFalse(bsonObject.containsKey("blah"));
    copy.remove("foo");
    assertFalse(copy.containsKey("foo"));
    assertTrue(bsonObject.containsKey("foo"));
    // ...and vice versa.
    bsonObject.put("oob", "flarb");
    assertFalse(copy.containsKey("oob"));
    bsonObject.remove("quux");
    assertFalse(bsonObject.containsKey("quux"));
    assertTrue(copy.containsKey("quux"));
    // Nested objects are deep-copied, not shared.
    BsonObject nested = bsonObject.getBsonObject("wibble");
    BsonObject nestedCopied = copy.getBsonObject("wibble");
    assertNotSame(nested, nestedCopied);
    assertEquals(nested, nestedCopied);
    // The StringBuilder value is materialised as a String in the copy.
    assertEquals("blah", copy.getString("eek"));
}
/**
 * copy() rejects values of unsupported types at the top level.
 */
@Test
public void testInvalidValsOnCopy1() {
    Map<String, Object> invalid = new HashMap<>();
    invalid.put("foo", new SomeClass());
    BsonObject object = new BsonObject(invalid);
    try {
        object.copy();
        fail();
    } catch (IllegalStateException e) {
        // OK - unsupported value type detected during deep copy
    }
}
/**
 * copy() rejects unsupported values nested inside a Map.
 */
@Test
public void testInvalidValsOnCopy2() {
    Map<String, Object> invalid = new HashMap<>();
    Map<String, Object> invalid2 = new HashMap<>();
    invalid2.put("foo", new SomeClass());
    invalid.put("bar", invalid2);
    BsonObject object = new BsonObject(invalid);
    try {
        object.copy();
        fail();
    } catch (IllegalStateException e) {
        // OK - unsupported value type detected during deep copy
    }
}
/**
 * copy() rejects unsupported values nested inside a List.
 */
@Test
public void testInvalidValsOnCopy3() {
    Map<String, Object> invalid = new HashMap<>();
    List<Object> invalid2 = new ArrayList<>();
    invalid2.add(new SomeClass());
    invalid.put("bar", invalid2);
    BsonObject object = new BsonObject(invalid);
    try {
        object.copy();
        fail();
    } catch (IllegalStateException e) {
        // OK - unsupported value type detected during deep copy
    }
}
// Marker type that is not a valid BSON value; used by the
// testInvalidValsOnCopy* tests to provoke copy failures.
class SomeClass {
}
/**
 * getMap() exposes the live backing map: mutations through either the map
 * or the BsonObject are visible on both sides, and nested values are the
 * very same instances.
 */
@Test
public void testGetMap() {
    bsonObject.put("foo", "bar");
    bsonObject.put("quux", 123);
    BsonObject obj = createBsonObject();
    bsonObject.put("wibble", obj);
    Map<String, Object> map = bsonObject.getMap();
    map.remove("foo");
    assertFalse(bsonObject.containsKey("foo"));
    map.put("bleep", "flarp");
    assertTrue(bsonObject.containsKey("bleep"));
    bsonObject.remove("quux");
    assertFalse(map.containsKey("quux"));
    bsonObject.put("wooble", "plink");
    assertTrue(map.containsKey("wooble"));
    assertSame(obj, map.get("wibble"));
}
/**
 * A BsonObject built from a Map wraps that map directly (no copy) and
 * reads values through the typed accessors.
 */
@Test
public void testCreateFromMap() {
    Map<String, Object> backing = new HashMap<>();
    backing.put("foo", "bar");
    backing.put("quux", 123);
    BsonObject wrapped = new BsonObject(backing);
    assertEquals("bar", wrapped.getString("foo"));
    assertEquals(Integer.valueOf(123), wrapped.getInteger("quux"));
    assertSame(backing, wrapped.getMap());
}
/**
 * CharSequence values placed in the backing map are readable as Strings.
 */
@Test
public void testCreateFromMapCharSequence() {
    Map<String, Object> map = new HashMap<>();
    map.put("foo", "bar");
    map.put("quux", 123);
    map.put("eeek", new StringBuilder("blah"));
    BsonObject obj = new BsonObject(map);
    assertEquals("bar", obj.getString("foo"));
    assertEquals(Integer.valueOf(123), obj.getInteger("quux"));
    assertEquals("blah", obj.getString("eeek"));
    assertSame(map, obj.getMap());
}
/**
 * A nested BsonObject value survives construction from a Map.
 */
@Test
public void testCreateFromMapNestedBsonObject() {
    Map<String, Object> map = new HashMap<>();
    BsonObject nestedObj = new BsonObject().put("foo", "bar");
    map.put("nested", nestedObj);
    BsonObject obj = new BsonObject(map);
    BsonObject nestedRetrieved = obj.getBsonObject("nested");
    assertEquals("bar", nestedRetrieved.getString("foo"));
}
/**
 * A nested plain Map is readable back as a BsonObject.
 */
@Test
public void testCreateFromMapNestedMap() {
    Map<String, Object> map = new HashMap<>();
    Map<String, Object> nestedMap = new HashMap<>();
    nestedMap.put("foo", "bar");
    map.put("nested", nestedMap);
    BsonObject obj = new BsonObject(map);
    BsonObject nestedRetrieved = obj.getBsonObject("nested");
    assertEquals("bar", nestedRetrieved.getString("foo"));
}
/**
 * A nested BsonArray value survives construction from a Map.
 */
@Test
public void testCreateFromMapNestedBsonArray() {
    Map<String, Object> map = new HashMap<>();
    BsonArray nestedArr = new BsonArray().add("foo");
    map.put("nested", nestedArr);
    BsonObject obj = new BsonObject(map);
    BsonArray nestedRetrieved = obj.getBsonArray("nested");
    assertEquals("foo", nestedRetrieved.getString(0));
}
/**
 * A nested plain List is readable back as a BsonArray.
 */
@Test
public void testCreateFromMapNestedList() {
    Map<String, Object> map = new HashMap<>();
    List<String> nestedArr = Arrays.asList("foo");
    map.put("nested", nestedArr);
    BsonObject obj = new BsonObject(map);
    BsonArray nestedRetrieved = obj.getBsonArray("nested");
    assertEquals("foo", nestedRetrieved.getString(0));
}
/**
 * Numeric equality is value-based across int/long/float/double where the
 * value is exactly representable in both types.
 */
@Test
public void testNumberEquality() {
    assertNumberEquals(4, 4);
    assertNumberEquals(4, (long)4);
    assertNumberEquals(4, 4f);
    assertNumberEquals(4, 4D);
    assertNumberEquals((long)4, (long)4);
    assertNumberEquals((long)4, 4f);
    assertNumberEquals((long)4, 4D);
    assertNumberEquals(4f, 4f);
    assertNumberEquals(4f, 4D);
    assertNumberEquals(4D, 4D);
    assertNumberEquals(4.1D, 4.1D);
    assertNumberEquals(4.1f, 4.1f);
    // 4.1 has no exact binary representation, so the float and double
    // approximations differ; 4.5 is exact in both, so they compare equal.
    assertNumberNotEquals(4.1f, 4.1D);
    assertNumberEquals(4.5D, 4.5D);
    assertNumberEquals(4.5f, 4.5f);
    assertNumberEquals(4.5f, 4.5D);
    assertNumberNotEquals(4, 5);
    assertNumberNotEquals(4, (long)5);
    assertNumberNotEquals(4, 5D);
    assertNumberNotEquals(4, 5f);
    assertNumberNotEquals((long)4, (long)5);
    assertNumberNotEquals((long)4, 5D);
    assertNumberNotEquals((long)4, 5f);
    assertNumberNotEquals(4f, 5f);
    assertNumberNotEquals(4f, 5D);
    assertNumberNotEquals(4D, 5D);
}
/**
 * Asserts that the two numbers compare equal both as BsonObject values
 * and as BsonArray elements.
 */
private void assertNumberEquals(Number value1, Number value2) {
    BsonObject o1 = new BsonObject().put("key", value1);
    BsonObject o2 = new BsonObject().put("key", value2);
    if (!o1.equals(o2)) {
        fail("Was expecting " + value1.getClass().getSimpleName() + ":" + value1 + " == " +
                value2.getClass().getSimpleName() + ":" + value2);
    }
    BsonArray a1 = new BsonArray().add(value1);
    BsonArray a2 = new BsonArray().add(value2);
    if (!a1.equals(a2)) {
        fail("Was expecting " + value1.getClass().getSimpleName() + ":" + value1 + " == " +
                value2.getClass().getSimpleName() + ":" + value2);
    }
}
/**
 * Asserts that the two numbers compare unequal, both as BsonObject values
 * and as BsonArray elements. Mirrors {@code assertNumberEquals}, which
 * checks both containers; previously only the object case was verified.
 */
private void assertNumberNotEquals(Number value1, Number value2) {
    BsonObject o1 = new BsonObject().put("key", value1);
    BsonObject o2 = new BsonObject().put("key", value2);
    if (o1.equals(o2)) {
        fail("Was expecting " + value1.getClass().getSimpleName() + ":" + value1 + " != " +
                value2.getClass().getSimpleName() + ":" + value2);
    }
    BsonArray a1 = new BsonArray().add(value1);
    BsonArray a2 = new BsonArray().add(value2);
    if (a1.equals(a2)) {
        fail("Was expecting " + value1.getClass().getSimpleName() + ":" + value1 + " != " +
                value2.getClass().getSimpleName() + ":" + value2);
    }
}
/**
 * Equality is structural and numeric-value based: nested Maps equal
 * nested BsonObjects, and 3 (int) equals 3L, but differing values differ.
 */
@Test
public void testBsonObjectEquality() {
    BsonObject obj = new BsonObject(Collections.singletonMap("abc", Collections.singletonMap("def", 3)));
    assertEquals(obj, new BsonObject(Collections.singletonMap("abc", Collections.singletonMap("def", 3))));
    assertEquals(obj, new BsonObject(Collections.singletonMap("abc", Collections.singletonMap("def", 3L))));
    assertEquals(obj, new BsonObject(Collections.singletonMap("abc", new BsonObject().put("def", 3))));
    assertEquals(obj, new BsonObject(Collections.singletonMap("abc", new BsonObject().put("def", 3L))));
    assertNotEquals(obj, new BsonObject(Collections.singletonMap("abc", Collections.singletonMap("def", 4))));
    assertNotEquals(obj, new BsonObject(Collections.singletonMap("abc", new BsonObject().put("def", 4))));
    // Same invariants hold for arrays of nested maps/objects.
    BsonArray array = new BsonArray(Collections.singletonList(Collections.singletonMap("def", 3)));
    assertEquals(array, new BsonArray(Collections.singletonList(Collections.singletonMap("def", 3))));
    assertEquals(array, new BsonArray(Collections.singletonList(Collections.singletonMap("def", 3L))));
    assertEquals(array, new BsonArray(Collections.singletonList(new BsonObject().put("def", 3))));
    assertEquals(array, new BsonArray(Collections.singletonList(new BsonObject().put("def", 3L))));
    assertNotEquals(array, new BsonArray(Collections.singletonList(Collections.singletonMap("def", 4))));
    assertNotEquals(array, new BsonArray(Collections.singletonList(new BsonObject().put("def", 4))));
}
/**
 * Regression test: iterating the entries once mutated the underlying
 * object, changing the outcome of a subsequent equals().
 */
@Test
public void testBsonObjectEquality2() {
    BsonObject obj1 = new BsonObject().put("arr", new BsonArray().add("x"));
    List<Object> innerList = new ArrayList<>();
    innerList.add("x");
    Map<String, Object> backing = new HashMap<>();
    backing.put("arr", innerList);
    BsonObject obj2 = new BsonObject(backing);
    // Drain the iterator; the entries themselves are irrelevant here.
    Iterator<Map.Entry<String, Object>> iter = obj2.iterator();
    while (iter.hasNext()) {
        iter.next();
    }
    assertEquals(obj2, obj1);
}
/**
 * An Instant put via the generic Object overload is stored in its String
 * form.
 */
@Test
public void testPutInstantAsObject() {
    Object instant = Instant.now();
    // Renamed: the local was previously called "BsonObject", shadowing the
    // type name and hurting readability.
    BsonObject bson = new BsonObject();
    bson.put("instant", instant);
    // assert data is stored as String
    assertTrue(bson.getValue("instant") instanceof String);
}
/**
 * stream() must wrap nested values as BsonObject — both on the original
 * object and on a copy of it.
 */
@Test
public void testStreamCorrectTypes() throws Exception {
    BsonObject object = new BsonObject();
    object.put("object1", new BsonObject().put("object2", 12));
    testStreamCorrectTypes(object.copy());
    testStreamCorrectTypes(object);
}
/**
 * remove() returns null for a missing key, otherwise the stored value with
 * its wrapper type (String / BsonObject / BsonArray) intact.
 */
@Test
public void testRemoveMethodReturnedObject() {
    BsonObject obj = new BsonObject();
    obj.put("simple", "bar")
            .put("object", new BsonObject().put("name", "vert.x").put("count", 2))
            .put("array", new BsonArray().add(1.0).add(2.0));
    Object removed = obj.remove("missing");
    assertNull(removed);
    removed = obj.remove("simple");
    assertTrue(removed instanceof String);
    removed = obj.remove("object");
    assertTrue(removed instanceof BsonObject);
    assertEquals(((BsonObject)removed).getString("name"), "vert.x");
    removed = obj.remove("array");
    assertTrue(removed instanceof BsonArray);
    assertEquals(((BsonArray)removed).getDouble(0), 1.0, 0.0);
}
/**
 * Round trip: JsonObject -> BsonObject -> JsonObject preserves both the
 * backing map contents and equality.
 */
@Test
public void testJsonObjectConversion() {
    JsonObject jsonObject = createJsonObject();
    BsonObject bsonObject = new BsonObject(jsonObject);
    assertEquals(jsonObject.getMap(), bsonObject.getMap());
    JsonObject jsonObject2 = bsonObject.toJsonObject();
    assertEquals(jsonObject, jsonObject2);
}
/**
 * Checks that the single entry of {@code object} is exposed by stream()
 * as a BsonObject (not a raw Map).
 */
private void testStreamCorrectTypes(BsonObject object) {
    object.stream().forEach(entry -> {
        String key = entry.getKey();
        Object val = entry.getValue();
        assertEquals("object1", key);
        assertTrue("Expecting BsonObject, found: " + val.getClass().getCanonicalName(), val instanceof BsonObject);
    });
}
/**
 * Builds a BsonObject containing one value of each supported scalar type,
 * for reuse across tests.
 */
private BsonObject createBsonObject() {
    BsonObject obj = new BsonObject();
    obj.put("mystr", "bar");
    obj.put("myint", Integer.MAX_VALUE);
    obj.put("mylong", Long.MAX_VALUE);
    obj.put("myfloat", Float.MAX_VALUE);
    obj.put("mydouble", Double.MAX_VALUE);
    obj.put("myboolean", true);
    obj.put("mybinary", TestUtils.randomByteArray(100));
    obj.put("myinstant", Instant.now());
    return obj;
}
/**
 * JsonObject counterpart of {@code createBsonObject()}: one value of each
 * supported scalar type, used by the conversion test.
 */
private JsonObject createJsonObject() {
    JsonObject obj = new JsonObject();
    obj.put("mystr", "bar");
    obj.put("myint", Integer.MAX_VALUE);
    obj.put("mylong", Long.MAX_VALUE);
    obj.put("myfloat", Float.MAX_VALUE);
    obj.put("mydouble", Double.MAX_VALUE);
    obj.put("myboolean", true);
    obj.put("mybinary", TestUtils.randomByteArray(100));
    obj.put("myinstant", Instant.now());
    return obj;
}
}
| |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.waveprotocol.wave.model.document.operation.impl;
import org.waveprotocol.wave.model.document.operation.AnnotationBoundaryMap;
import org.waveprotocol.wave.model.document.operation.AnnotationBoundaryMapBuilder;
import org.waveprotocol.wave.model.document.operation.Attributes;
import org.waveprotocol.wave.model.document.operation.AttributesUpdate;
import org.waveprotocol.wave.model.document.operation.DocInitialization;
import org.waveprotocol.wave.model.document.operation.DocOp;
import org.waveprotocol.wave.model.document.operation.DocInitializationComponentType;
import org.waveprotocol.wave.model.document.operation.DocInitializationCursor;
import org.waveprotocol.wave.model.document.operation.DocOpComponentType;
import org.waveprotocol.wave.model.document.operation.DocOpCursor;
import org.waveprotocol.wave.model.document.operation.EvaluatingDocOpCursor;
import org.waveprotocol.wave.model.document.operation.algorithm.AnnotationsNormalizer;
import org.waveprotocol.wave.model.document.operation.algorithm.RangeNormalizer;
import org.waveprotocol.wave.model.document.operation.util.ExplodedDocOp;
import org.waveprotocol.wave.model.document.parser.AnnotationParser;
import org.waveprotocol.wave.model.document.parser.XmlParseException;
import org.waveprotocol.wave.model.document.parser.XmlParserFactory;
import org.waveprotocol.wave.model.document.parser.XmlPullParser;
import org.waveprotocol.wave.model.operation.OpCursorException;
import org.waveprotocol.wave.model.util.Pair;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
public class DocOpUtil {
// Static utility class: private constructor prevents instantiation.
private DocOpUtil() { /* utility class, not to be instantiated */ }
/**
 * Name of the XML Processing Instruction used for annotations.
 *
 * Refer to this variable only. Do not use a string literal.
 */
public static final String PI_TARGET = "a";
/**
 * Views an arbitrary {@link DocOp} as a {@link DocInitialization}.
 *
 * If the op already is an initialization it is returned as-is; otherwise a
 * delegating wrapper is returned that forwards every accessor to the
 * original op and fails if a non-initialization component is encountered.
 *
 * @param op the operation to view; its components must all be
 *     insertion/initialization components for the wrapper to be usable
 */
public static DocInitialization asInitialization(final DocOp op) {
    if (op instanceof DocInitialization) {
        return (DocInitialization) op;
    } else {
        return new AbstractBufferedDocInitialization() {
            @Override
            public void apply(DocInitializationCursor c) {
                // Adapt the initialization cursor so it can receive DocOp callbacks.
                op.apply(InitializationCursorAdapter.adapt(c));
            }
            @Override
            public void applyComponent(int i, DocInitializationCursor c) {
                op.applyComponent(i, InitializationCursorAdapter.adapt(c));
            }
            @Override
            public AnnotationBoundaryMap getAnnotationBoundary(int i) {
                return op.getAnnotationBoundary(i);
            }
            @Override
            public String getCharactersString(int i) {
                return op.getCharactersString(i);
            }
            @Override
            public Attributes getElementStartAttributes(int i) {
                return op.getElementStartAttributes(i);
            }
            @Override
            public String getElementStartTag(int i) {
                return op.getElementStartTag(i);
            }
            @Override
            public DocInitializationComponentType getType(int i) {
                DocOpComponentType t = op.getType(i);
                if (t instanceof DocInitializationComponentType) {
                    return (DocInitializationComponentType) t;
                } else {
                    // The wrapped op contained a mutation component (retain,
                    // delete, ...), which cannot appear in an initialization.
                    throw new UnsupportedOperationException(
                            "Initialization with unexpected component " + t + ": " + op);
                }
            }
            @Override
            public int size() {
                return op.size();
            }
        };
    }
}
/**
 * Renders an op in the compact mnemonic notation used for debugging.
 */
public static String toConciseString(DocOp op) {
    final StringBuilder out = new StringBuilder();
    op.apply(createConciseStringBuilder(op, out));
    return out.toString();
}
/**
 * Returns a cursor that appends each visited component to {@code b} as a
 * short mnemonic ("++" insert chars, "--" delete chars, "__" retain, etc.)
 * followed by "; ".
 */
public static DocOpCursor createConciseStringBuilder(DocOp op, final StringBuilder b) {
    return new DocOpCursor() {
        @Override
        public void deleteCharacters(String chars) {
            b.append("--").append(literalString(chars)).append("; ");
        }
        @Override
        public void deleteElementEnd() {
            b.append("x>; ");
        }
        @Override
        public void deleteElementStart(String type, Attributes attrs) {
            b.append("x< ").append(type).append(" ").append(toConciseString(attrs)).append("; ");
        }
        @Override
        public void replaceAttributes(Attributes oldAttrs, Attributes newAttrs) {
            b.append("r@ ").append(toConciseString(oldAttrs)).append(" ")
                    .append(toConciseString(newAttrs)).append("; ");
        }
        @Override
        public void retain(int distance) {
            b.append("__").append(distance).append("; ");
        }
        @Override
        public void updateAttributes(AttributesUpdate attrUpdate) {
            b.append("u@ ").append(toConciseString(attrUpdate)).append("; ");
        }
        @Override
        public void annotationBoundary(AnnotationBoundaryMap map) {
            b.append("|| ").append(toConciseString(map)).append("; ");
        }
        @Override
        public void characters(String chars) {
            b.append("++").append(literalString(chars)).append("; ");
        }
        @Override
        public void elementEnd() {
            b.append(">>; ");
        }
        @Override
        public void elementStart(String type, Attributes attrs) {
            b.append("<< ").append(type).append(" ").append(toConciseString(attrs)).append("; ");
        }
    };
}
/**
 * Renders attributes as "{ k1="v1", k2="v2" }", or "{}" when empty.
 */
public static String toConciseString(Attributes attributes) {
    if (attributes.isEmpty()) {
        return "{}";
    }
    StringBuilder out = new StringBuilder();
    out.append("{ ");
    String separator = "";
    for (Map.Entry<String, String> entry : attributes.entrySet()) {
        out.append(separator);
        separator = ", ";
        out.append(entry.getKey()).append("=").append(literalString(entry.getValue()));
    }
    out.append(" }");
    return out.toString();
}
/**
 * Renders an attributes update as "{ key: "old" -> "new", ... }", or "{}"
 * when no changes are present.
 */
public static String toConciseString(AttributesUpdate update) {
    final int changes = update.changeSize();
    if (changes == 0) {
        return "{}";
    }
    StringBuilder out = new StringBuilder();
    out.append("{ ");
    for (int i = 0; i < changes; ++i) {
        if (i > 0) {
            out.append(", ");
        }
        out.append(update.getChangeKey(i))
                .append(": ")
                .append(literalString(update.getOldValue(i)))
                .append(" -> ")
                .append(literalString(update.getNewValue(i)));
    }
    out.append(" }");
    return out.toString();
}
/**
 * Renders an annotation boundary: ended keys first, then changed keys as
 * "key: "old" -> "new""; returns "{}" when the map is empty.
 */
public static String toConciseString(AnnotationBoundaryMap map) {
    StringBuilder b = new StringBuilder();
    b.append("{ ");
    boolean notEmpty = false;
    // Keys whose annotations end here.
    for (int i = 0; i < map.endSize(); ++i) {
        if (notEmpty) {
            b.append(", ");
        } else {
            notEmpty = true;
        }
        b.append(literalString(map.getEndKey(i)));
    }
    // Keys whose annotation value changes here.
    for (int i = 0; i < map.changeSize(); ++i) {
        if (notEmpty) {
            b.append(", ");
        } else {
            notEmpty = true;
        }
        b.append(literalString(map.getChangeKey(i)));
        b.append(": ");
        b.append(literalString(map.getOldValue(i)));
        b.append(" -> ");
        b.append(literalString(map.getNewValue(i)));
    }
    b.append(" }");
    // Fall back to the compact empty form if nothing was appended.
    return notEmpty ? b.toString() : "{}";
}
/**
 * Escapes backslashes and double quotes for embedding in a quoted literal.
 * Backslashes are handled first so quote escapes are not double-escaped.
 */
private static String escapeLiteral(String string) {
    String slashesEscaped = string.replace("\\", "\\\\");
    return slashesEscaped.replace("\"", "\\\"");
}
/**
 * Quotes and escapes a string for debug output; renders null as the bare
 * word "null".
 */
private static String literalString(String string) {
    if (string == null) {
        return "null";
    }
    return "\"" + escapeLiteral(string) + "\"";
}
/**
 * Generates the minimal, normalised XML representation of a document
 * initialisation.
 *
 * It is safe to use the return value of this method to compare the equality
 * of two documents.
 *
 * @param op must be well-formed
 * @return XML String representation, with annotations represented by a
 *         nonstandard processing instruction notation.
 */
public static String toXmlString(DocInitialization op) {
    // -1 disables pretty-printing (see toPrettyXmlString).
    return toPrettyXmlString(op, -1);
}
/**
 * Same as {@link #toXmlString(DocInitialization)}, but pretty-prints
 * @param indent indent level
 */
public static String toPrettyXmlString(DocInitialization op, final int indent) {
    //TODO(danilatos): Actually implement indent.
    StringBuilder out = new StringBuilder();
    buildXmlString(op, indent, out);
    return out.toString();
}
/**
 * Variant of {@link #toPrettyXmlString(DocInitialization, int)} that accepts a
 * StringBuilder instead
 *
 * @param op must be well-formed
 * @param indent indent level (currently unused; see TODO in toPrettyXmlString)
 * @param b receives the generated XML
 */
public static void buildXmlString(DocInitialization op, final int indent,
        final StringBuilder b) {
    try {
        op.apply(new DocInitializationCursor() {
            // Annotation values in effect at the current cursor position.
            Map<String, String> currentAnnotations = new HashMap<String, String>();
            // Keys whose value changes at the current boundary; TreeMap keeps
            // the output deterministic (sorted by key).
            TreeMap<String, String> changes = new TreeMap<String, String>();
            // Deque not supported by GWT :(
            ArrayList<String> tags = new ArrayList<String>();
            // Pending open tag, held back without its trailing '>' so that an
            // immediately following elementEnd can emit a self-closing tag.
            String elementPart;
            @Override
            public void annotationBoundary(AnnotationBoundaryMap map) {
                changes.clear();
                for (int i = 0; i < map.changeSize(); i++) {
                    String key = map.getChangeKey(i);
                    String value = map.getNewValue(i);
                    if (!equal(currentAnnotations.get(key), value)) {
                        // removal not necessary if null, get will return the same in either case.
                        currentAnnotations.put(key, value);
                        changes.put(key, value);
                    }
                }
                for (int i = 0; i < map.endSize(); i++) {
                    String key = map.getEndKey(i);
                    if (currentAnnotations.get(key) != null) {
                        currentAnnotations.remove(key);
                        changes.put(key, null);
                    }
                }
                if (changes.isEmpty()) {
                    // No effective change: emit nothing, keeping output minimal.
                    return;
                }
                if (elementPart != null) {
                    b.append(elementPart + ">");
                    elementPart = null;
                }
                b.append("<?" + PI_TARGET);
                for (Map.Entry<String, String> entry : changes.entrySet()) {
                    if (entry.getValue() != null) {
                        b.append(" \"" + xmlTextEscape(annotationEscape(entry.getKey())) + "\"");
                        b.append("=");
                        b.append("\"" + xmlTextEscape(annotationEscape(entry.getValue())) + "\"");
                    } else {
                        // This code renders ending annotations and annotations that are
                        // changed to null the same way, which is OK since we are
                        // only concerned with DocIntializations. (It's, in fact, the
                        // only correct solution since our test cases use this code for
                        // equality comparison of documents.)
                        b.append(" \"" + xmlTextEscape(annotationEscape(entry.getKey())) + "\"");
                    }
                }
                b.append("?>");
            }
            @Override
            public void characters(String chars) {
                if (elementPart != null) {
                    b.append(elementPart + ">");
                    elementPart = null;
                }
                b.append(xmlTextEscape(chars));
            }
            @Override
            public void elementStart(String type, Attributes attrs) {
                if (elementPart != null) {
                    // Flush the enclosing element's pending open tag first.
                    b.append(elementPart + ">");
                    elementPart = null;
                }
                elementPart = "<" + type + (attrs.isEmpty() ? "" : " " + attributeString(attrs));
                tags.add(type);
            }
            @Override
            public void elementEnd() {
                if (elementPart != null) {
                    // The element had no content: emit it self-closing.
                    b.append(elementPart + "/>");
                    elementPart = null;
                    assert tags.size() > 0;
                    tags.remove(tags.size() - 1);
                } else {
                    String tag;
                    tag = tags.remove(tags.size() - 1);
                    b.append("</" + tag + ">");
                }
            }
            // Null-safe string equality.
            private boolean equal(String a, String b) {
                return a == null ? b == null : a.equals(b);
            }
        });
    } catch (RuntimeException e) {
        throw new RuntimeException("toXmlString: DocInitialization was probably ill-formed", e);
    }
}
/**
 * Like {@link #toXmlString(DocInitialization)}, but never throws
 * {@link OpCursorException}: on internal failure it returns a diagnostic
 * string instead. Intended for debug/log output only.
 */
public static String debugToXmlString(DocInitialization op) {
    try {
        return toXmlString(op);
    } catch (OpCursorException e) {
        // This exception is probably due to some internal validity problem with the operation,
        // e.g. a lazily evaluated compose implementation.
        // Because this is similar to an OperationException, we should catch it and return
        // something for debug purposes, rather than have the method simply crash.
        // Append the identity hashCode to decrease the probability of two error return values
        // being equal, in case they're being used for equality comparisons.
        // They shouldn't, toXmlString() is better for this.
        return "toXmlString: DocInitialization was internally broken. " +
                "(" + Integer.toHexString(System.identityHashCode(op)) + ")";
    }
}
/**
 * Serialises attributes as space-separated {@code key="value"} pairs.
 */
public static String attributeString(Attributes attributes) {
    StringBuilder out = new StringBuilder();
    String separator = "";
    for (Map.Entry<String, String> e : attributes.entrySet()) {
        out.append(separator);
        separator = " ";
        // We're just writing null with no quotes if the value is null.
        // This is acceptable since it only occurs in updateAttributes,
        // which is a processing instruction, so we define the format of it.
        //
        // TODO: We should escape ' and " and < and & etc. in the value.
        String rendered = e.getValue() == null ? "null"
                : "\"" + xmlAttrEscape(e.getValue()) + "\"";
        out.append(e.getKey()).append("=").append(rendered);
    }
    return out.toString();
}
/**
* Warning: escapes only the double quotation marks! (is that officially
* enough, if it is to be surrounded by double quotation marks?)
*
* @param attrValue
*/
public static String xmlAttrEscape(String attrValue) {
return attrValue
.replaceAll("\"", """);
}
/**
 * Escapes XML character data: ampersand first (so the entities introduced
 * below are not re-escaped), then the angle brackets.
 *
 * @param text raw text
 * @return text safe for use as XML character data
 */
public static String xmlTextEscape(String text) {
    // Restored entity replacements; the previous code had been corrupted by
    // entity-decoding into no-op replacements ("&" -> "&", etc.).
    return text
            .replaceAll("&", "&amp;")
            .replaceAll("<", "&lt;")
            .replaceAll(">", "&gt;");
}
/**
 * Escapes a string for the annotation processing-instruction notation.
 * Order matters: backslashes first, so the escapes added for quotes and
 * question marks are not themselves re-escaped.
 */
public static String annotationEscape(String value) {
    return value
            .replace("\\", "\\\\")
            .replace("\"", "\\\"")
            .replace("?", "\\q");
}
/**
 * Inverse of {@link #xmlTextEscape(String)}: converts the predefined XML
 * entities back to characters, with &amp;amp; handled last so that text
 * such as "&amp;amp;lt;" round-trips correctly.
 *
 * @param escaped XML character data produced by {@link #xmlTextEscape}
 */
public static String xmlTextUnEscape(String escaped) {
    // Restored entity patterns; the previous code had been corrupted by
    // entity-decoding into no-op replacements (">" -> ">", etc.).
    return escaped
            .replaceAll("&gt;", ">")
            .replaceAll("&lt;", "<")
            .replaceAll("&amp;", "&");
}
/**
 * Inverse of {@link #annotationEscape(String)}; replacements run in the
 * reverse order of the escape.
 * NOTE(review): sequential replace() can mis-handle pathological inputs
 * where an escaped backslash is followed by a literal 'q' (e.g. the
 * escape of "\q" is "\\q", which this method turns into "\?") — a
 * character-by-character scan would be needed for a fully correct inverse.
 * Verify whether such inputs can occur before relying on round-tripping.
 */
public static String annotationUnEscape(String escaped) {
    return escaped
            .replace("\\q", "?")
            .replace("\\\"", "\"")
            .replace("\\\\", "\\");
}
/**
 * Normalises an op: adjacent ranges are merged and redundant annotation
 * boundaries removed.
 */
public static DocOp normalize(DocOp in) {
    RangeNormalizer<DocOp> ranges = new RangeNormalizer<DocOp>(new DocOpBuffer());
    EvaluatingDocOpCursor<DocOp> normalizer = new AnnotationsNormalizer<DocOp>(ranges);
    in.apply(normalizer);
    return normalizer.finish();
}
/**
 * Normalises an initialization; same pipeline as {@link #normalize(DocOp)}
 * with the result re-viewed as an initialization.
 */
public static DocInitialization normalize(DocInitialization in) {
    RangeNormalizer<DocOp> ranges = new RangeNormalizer<DocOp>(new DocOpBuffer());
    EvaluatingDocOpCursor<DocOp> normalizer = new AnnotationsNormalizer<DocOp>(ranges);
    in.apply(normalizer);
    return asInitialization(normalizer.finish());
}
/**
 * Computes the number of items of the document that an op applies to, prior
 * to its application.
 */
public static int initialDocumentLength(DocOp op) {
    // Single-element array so the anonymous cursor can mutate the count.
    final int[] size = { 0 };
    op.apply(new DocOpCursor() {
        // Deletions and retains consume items of the *initial* document.
        @Override
        public void deleteCharacters(String chars) {
            size[0] += chars.length();
        }
        @Override
        public void deleteElementEnd() {
            size[0]++;
        }
        @Override
        public void deleteElementStart(String type, Attributes attrs) {
            size[0]++;
        }
        @Override
        public void replaceAttributes(Attributes oldAttrs, Attributes newAttrs) {
            size[0]++;
        }
        @Override
        public void retain(int itemCount) {
            size[0] += itemCount;
        }
        @Override
        public void updateAttributes(AttributesUpdate attrUpdate) {
            size[0]++;
        }
        // Insertions and annotation boundaries consume no initial items.
        @Override
        public void annotationBoundary(AnnotationBoundaryMap map) {
        }
        @Override
        public void characters(String chars) {
        }
        @Override
        public void elementEnd() {
        }
        @Override
        public void elementStart(String type, Attributes attrs) {
        }
    });
    return size[0];
}
/**
 * Computes the number of items of the document that an op produces when
 * applied.
 */
public static int resultingDocumentLength(DocOp op) {
    // Single-element array so the anonymous cursor can mutate the count.
    final int[] size = { 0 };
    op.apply(new DocOpCursor() {
        // Deletions contribute nothing to the *resulting* document.
        @Override
        public void deleteCharacters(String chars) {
        }
        @Override
        public void deleteElementEnd() {
        }
        @Override
        public void deleteElementStart(String type, Attributes attrs) {
        }
        // Retains and attribute changes keep their items in the result.
        @Override
        public void replaceAttributes(Attributes oldAttrs, Attributes newAttrs) {
            size[0]++;
        }
        @Override
        public void retain(int itemCount) {
            size[0] += itemCount;
        }
        @Override
        public void updateAttributes(AttributesUpdate attrUpdate) {
            size[0]++;
        }
        @Override
        public void annotationBoundary(AnnotationBoundaryMap map) {
        }
        // Insertions add items to the result.
        @Override
        public void characters(String chars) {
            size[0] += chars.length();
        }
        @Override
        public void elementEnd() {
            size[0]++;
        }
        @Override
        public void elementStart(String type, Attributes attrs) {
            size[0]++;
        }
    });
    return size[0];
}
/**
 * Renders an op aligned against the document it applies to, for debugging.
 *
 * @param doc the initial document the op applies to
 * @param op the operation to visualise
 * @return three column-aligned strings: the document XML, the op in concise
 *     notation, and the document item index at which each op component starts
 */
public static String[] visualiseOpWithDocument(final DocInitialization doc, final DocOp op) {
    final StringBuilder docB = new StringBuilder();
    final StringBuilder opB = new StringBuilder();
    final StringBuilder indicesB = new StringBuilder();
    final StringBuilder[] builders = { docB, opB, indicesB };
    // Exploded form: one component per document item, so items can be
    // consumed one at a time.
    final DocInitialization exploded = ExplodedDocOp.explode(doc);
    final int numDocComponents = exploded.size();
    final DocOpCursor opStringifier = createConciseStringBuilder(op, opB);
    final DocInitializationBuffer target = new DocInitializationBuffer();
    new Runnable() {
        int index = 0;
        int docItem = 0;
        // Copies itemCount document items into the target buffer, recording
        // the item index at which the current op component starts.
        private void runTarget(int itemCount) {
            indicesB.append(docItem);
            docItem += itemCount;
            while (index < numDocComponents && itemCount > 0) {
                exploded.applyComponent(index, target);
                if (exploded.getType(index) != DocOpComponentType.ANNOTATION_BOUNDARY) {
                    itemCount--;
                }
                index++;
            }
        }
        // Pads all three builders to equal length so the columns stay aligned.
        private void matchUp() {
            int max = 0;
            for (StringBuilder b : builders) {
                max = Math.max(max, b.length());
            }
            for (StringBuilder b : builders) {
                while (b.length() < max) {
                    b.append(' ');
                }
            }
        }
        @Override
        public void run() {
            op.apply(new DocOpCursor() {
                // Components that consume document items advance the doc
                // column via runTarget; pure insertions do not.
                @Override
                public void deleteCharacters(String chars) {
                    opStringifier.deleteCharacters(chars);
                    runTarget(chars.length());
                    matchUp();
                }
                @Override
                public void deleteElementEnd() {
                    opStringifier.deleteElementEnd();
                    runTarget(1);
                    matchUp();
                }
                @Override
                public void deleteElementStart(String type, Attributes attrs) {
                    opStringifier.deleteElementStart(type, attrs);
                    runTarget(1);
                    matchUp();
                }
                @Override
                public void replaceAttributes(Attributes oldAttrs, Attributes newAttrs) {
                    opStringifier.replaceAttributes(oldAttrs, newAttrs);
                    runTarget(1);
                    matchUp();
                }
                @Override
                public void retain(int itemCount) {
                    opStringifier.retain(itemCount);
                    runTarget(itemCount);
                    matchUp();
                }
                @Override
                public void updateAttributes(AttributesUpdate attrUpdate) {
                    opStringifier.updateAttributes(attrUpdate);
                    runTarget(1);
                    matchUp();
                }
                @Override
                public void annotationBoundary(AnnotationBoundaryMap map) {
                    opStringifier.annotationBoundary(map);
                    matchUp();
                }
                @Override
                public void characters(String chars) {
                    opStringifier.characters(chars);
                    matchUp();
                }
                @Override
                public void elementEnd() {
                    opStringifier.elementEnd();
                    matchUp();
                }
                @Override
                public void elementStart(String type, Attributes attrs) {
                    opStringifier.elementStart(type, attrs);
                    // NOTE(review): unlike the other insertion callbacks, this
                    // one does not call matchUp() — confirm whether that is
                    // intentional or an alignment bug.
                }
            });
            // Consume one trailing doc item after the op's extent — presumably
            // to flush the remainder into the target buffer; TODO confirm
            // against ExplodedDocOp's invariants.
            runTarget(1);
        }
    }.run();
    buildXmlString(target.finish(), -1, docB);
    return new String[] { docB.toString(), opB.toString(), indicesB.toString() };
}
/**
 * The inverse of toXmlString. Takes an XML representation of a document
 * initialization and returns a DocInitialization.
 *
 * @param text XML produced by {@link #toXmlString(DocInitialization)}
 * @return a DocInitialization
 * @throws XmlParseException if the input is not well-formed XML
 */
public static DocInitialization docInitializationFromXml(String text) throws XmlParseException {
    XmlPullParser p = XmlParserFactory.unbuffered(text);
    DocOpBuilder builder = new DocOpBuilder();
    // Translate each pull-parser event into the corresponding op component.
    while (p.hasNext()) {
        switch (p.next()) {
            case START_ELEMENT:
                builder.elementStart(p.getTagName(), AttributesImpl.fromStringMap(p.getAttributes()));
                continue;
            case END_ELEMENT:
                builder.elementEnd();
                continue;
            case TEXT:
                builder.characters(p.getText());
                continue;
            case PROCESSING_INSTRUCTION:
                // Only the annotation PI (target "a") is meaningful; all other
                // processing instructions are ignored.
                String name = p.getProcessingInstructionName();
                AnnotationBoundaryMapBuilder anBuilder = new AnnotationBoundaryMapBuilder();
                if (PI_TARGET.equals(name)) {
                    List<Pair<String, String>> parseAnnotations =
                            AnnotationParser.parseAnnotations(p.getProcessingInstructionValue());
                    for (Pair<String, String> ann : parseAnnotations) {
                        final String key = ann.first;
                        // Old values are not recorded in the XML notation.
                        final String oldValue = null;
                        final String newValue = ann.second;
                        if (newValue == null) {
                            anBuilder.end(key);
                        } else {
                            anBuilder.change(key, oldValue, newValue);
                        }
                    }
                    builder.annotationBoundary(anBuilder.build());
                }
                continue;
        }
    }
    DocOp op = builder.build();
    return DocOpUtil.asInitialization(op);
}
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.region.impl;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import com.google.common.base.Charsets;
import com.google.common.base.MoreObjects;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.core.id.ExternalSchemes;
import com.opengamma.core.region.RegionClassification;
import com.opengamma.master.region.ManageableRegion;
import com.opengamma.master.region.RegionDocument;
import com.opengamma.master.region.RegionMaster;
import com.opengamma.master.region.RegionSearchRequest;
import com.opengamma.master.region.RegionSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.i18n.Country;
import au.com.bytecode.opencsv.CSVReader;
/**
* Loads a CSV formatted UN/LOCODE file based on the regions in the holiday database.
* <p>
* This populates a region master.
*/
class UnLocodeRegionFileReader {

  /**
   * Path to the default regions file.
   */
  private static final String REGIONS_RESOURCE = "/com/opengamma/region/UNLOCODE.csv";
  /**
   * Path to the list of locode regions to load.
   */
  private static final String LOAD_RESOURCE = "/com/opengamma/master/region/impl/UnLocode.txt";
  /**
   * The region master to populate.
   */
  private final RegionMaster _regionMaster;

  /**
   * Populates a region master.
   *
   * @param regionMaster  the region master to populate, not null
   * @return the master, not null
   */
  static RegionMaster populate(final RegionMaster regionMaster) {
    final InputStream stream = regionMaster.getClass().getResourceAsStream(REGIONS_RESOURCE);
    if (stream == null) {
      // fail fast with a clear message rather than an NPE deep inside the parser
      throw new OpenGammaRuntimeException("Unable to find UN/LOCODE regions file: " + REGIONS_RESOURCE);
    }
    final UnLocodeRegionFileReader reader = new UnLocodeRegionFileReader(regionMaster);
    reader.parse(stream);
    return regionMaster;
  }

  //-------------------------------------------------------------------------
  /**
   * Creates an instance with a master to populate.
   *
   * @param regionMaster  the region master, not null
   */
  UnLocodeRegionFileReader(final RegionMaster regionMaster) {
    ArgumentChecker.notNull(regionMaster, "regionMaster");
    _regionMaster = regionMaster;
  }

  //-------------------------------------------------------------------------
  /**
   * Parses the regions from the stream and stores them, closing the stream afterwards.
   *
   * @param in  the stream to read, not null
   */
  private void parse(final InputStream in) {
    final InputStreamReader reader = new InputStreamReader(new BufferedInputStream(in), Charsets.UTF_8);
    try {
      parse(reader);
    } finally {
      IOUtils.closeQuietly(reader);
    }
  }

  /**
   * Parses the CSV data, augments the regions and stores them in the master.
   *
   * @param reader  the reader over the CSV data, not null
   */
  private void parse(final InputStreamReader reader) {
    final Set<String> required = parseRequired();
    final Set<ManageableRegion> regions = parseLocodes(reader, required);
    coppClark(regions);
    store(regions);
  }

  /**
   * Loads the set of UN/LOCODE identifiers that must be present in the CSV data.
   *
   * @return the required codes, not null
   */
  private Set<String> parseRequired() {
    final InputStream stream = getClass().getResourceAsStream(LOAD_RESOURCE);
    if (stream == null) {
      throw new OpenGammaRuntimeException("Unable to find UnLocode.txt defining the UN/LOCODEs");
    }
    try {
      final Set<String> lines = new HashSet<>(IOUtils.readLines(stream, "UTF-8"));
      final Set<String> required = new HashSet<>();
      for (String line : lines) {
        line = StringUtils.trimToNull(line);
        if (line != null) {
          required.add(line);
        }
      }
      return required;
    } catch (final Exception ex) {
      // preserve the underlying cause for diagnosis
      throw new OpenGammaRuntimeException("Unable to read UnLocode.txt defining the UN/LOCODEs", ex);
    } finally {
      IOUtils.closeQuietly(stream);
    }
  }

  /**
   * Parses the UN/LOCODE CSV data, keeping only the rows listed in the required set.
   * <p>
   * Each matched code is removed from the required set; any codes left over at the
   * end indicate missing data and cause an exception.
   *
   * @param in  the reader over the CSV data, not null
   * @param required  the codes to load, not null, emptied by this method on success
   * @return the parsed regions, not null
   */
  private Set<ManageableRegion> parseLocodes(final Reader in, final Set<String> required) {
    final Set<ManageableRegion> regions = new HashSet<>(1024, 0.75f);
    String name = null;
    try {
      @SuppressWarnings("resource")
      final
      CSVReader reader = new CSVReader(in);
      // column layout of the UN/LOCODE CSV file
      final int typeIdx = 0;
      final int countryIsoIdx = 1;
      final int unlocodePartIdx = 2;
      final int nameColumnIdx = 4;
      final int fullNameColumnIdx = 3;
      String[] row = null;
      while ((row = reader.readNext()) != null) {
        if (row.length < 9) {
          continue;  // skip malformed or header rows
        }
        name = StringUtils.trimToNull(row[nameColumnIdx]);
        final String type = StringUtils.trimToNull(row[typeIdx]);
        String fullName = StringUtils.trimToNull(row[fullNameColumnIdx]);
        fullName = MoreObjects.firstNonNull(fullName, name);
        final String countryISO = StringUtils.trimToNull(row[countryIsoIdx]);
        final String unlocodePart = StringUtils.trimToNull(row[unlocodePartIdx]);
        final String unlocode = countryISO + unlocodePart;
        // reject incomplete rows, non-standard codes and rows not in the required set
        if (StringUtils.isEmpty(name) || StringUtils.isEmpty(fullName) || StringUtils.isEmpty(countryISO)
            || StringUtils.isEmpty(unlocodePart) || unlocode.length() != 5
            || countryISO.equals("XZ") || "=".equals(type) || !required.remove(unlocode)) {
          continue;
        }
        final ManageableRegion region = createRegion(name, fullName, countryISO);
        region.addExternalId(ExternalSchemes.unLocode20102RegionId(unlocode));
        regions.add(region);
      }
    } catch (final Exception ex) {
      final String detail = name != null ? " while processing " + name : "";
      throw new OpenGammaRuntimeException("Unable to read UN/LOCODEs" + detail, ex);
    }
    if (!required.isEmpty()) {
      throw new OpenGammaRuntimeException("Requested UN/LOCODEs could not be found: " + required);
    }
    return regions;
  }

  /**
   * Creates a municipality region linked to its parent country.
   *
   * @param name  the short name, not null
   * @param fullName  the full name, not null
   * @param countryISO  the ISO country code of the parent, not null
   * @return the region, not null
   */
  private ManageableRegion createRegion(final String name, final String fullName, final String countryISO) {
    final ManageableRegion region = new ManageableRegion();
    region.setClassification(RegionClassification.MUNICIPALITY);
    region.setName(name);
    region.setFullName(fullName);
    addParent(region, countryISO);
    return region;
  }

  /**
   * Links a region to its parent country, which must already exist in the master.
   *
   * @param region  the region to update, not null
   * @param countryISO  the ISO country code of the parent, not null
   */
  private void addParent(final ManageableRegion region, final String countryISO) {
    final RegionSearchRequest request = new RegionSearchRequest();
    request.addCountry(Country.of(countryISO));
    final ManageableRegion parent = _regionMaster.search(request).getFirstRegion();
    if (parent == null) {
      throw new OpenGammaRuntimeException("Cannot find parent '" + countryISO + "' for '" + region.getName() + "'");
    }
    region.getParentRegionIds().add(parent.getUniqueId());
  }

  /**
   * Adds Copp Clark identifiers to the regions, applying known alterations and additions.
   *
   * @param regions  the regions to update, not null, added to by this method
   */
  private void coppClark(final Set<ManageableRegion> regions) {
    for (final ManageableRegion region : regions) {
      final String unLocode = region.getExternalIdBundle().getValue(ExternalSchemes.UN_LOCODE_2010_2);
      final String coppClarkLocode = COPP_CLARK_ALTERATIONS.get(unLocode);
      if (coppClarkLocode != null) {
        region.addExternalId(ExternalSchemes.coppClarkRegionId(coppClarkLocode));
        // a differing country prefix means the Copp Clark code belongs to another parent
        if (!coppClarkLocode.substring(0, 2).equals(unLocode.substring(0, 2))) {
          addParent(region, coppClarkLocode.substring(0, 2));
        }
      } else {
        region.addExternalId(ExternalSchemes.coppClarkRegionId(unLocode));
      }
    }
    for (final Entry<String, String> entry : COPP_CLARK_ADDITIONS.entrySet()) {
      final ManageableRegion region = createRegion(entry.getValue(), entry.getValue(), entry.getKey().substring(0, 2));
      region.addExternalId(ExternalSchemes.coppClarkRegionId(entry.getKey()));
      regions.add(region);
    }
  }

  /**
   * Stores the regions in the master, adding new documents and updating renamed ones.
   *
   * @param regions  the regions to store, not null
   */
  private void store(final Set<ManageableRegion> regions) {
    for (final ManageableRegion region : regions) {
      final RegionDocument doc = new RegionDocument();
      doc.setRegion(region);
      final RegionSearchRequest request = new RegionSearchRequest();
      request.addExternalIds(region.getExternalIdBundle());
      final RegionSearchResult result = _regionMaster.search(request);
      if (result.getDocuments().isEmpty()) {
        _regionMaster.add(doc);
      } else {
        // only touch the existing document if the names actually changed
        final RegionDocument existing = result.getFirstDocument();
        if (!existing.getRegion().getName().equals(doc.getRegion().getName())
            || !existing.getRegion().getFullName().equals(doc.getRegion().getFullName())) {
          existing.getRegion().setName(doc.getRegion().getName());
          existing.getRegion().setFullName(doc.getRegion().getFullName());
          _regionMaster.update(existing);
        }
      }
    }
  }

  //-------------------------------------------------------------------------
  /**
   * UN/LOCODEs whose Copp Clark equivalent differs.
   */
  private static final Map<String, String> COPP_CLARK_ALTERATIONS = new HashMap<>();
  static {
    COPP_CLARK_ALTERATIONS.put("CNCAN", "CNXSA");  // Guangzhou (China)
    COPP_CLARK_ALTERATIONS.put("GPMSB", "MFMGT");  // Marigot (Guadaloupe/St.Martin-MF)
    COPP_CLARK_ALTERATIONS.put("GPGUS", "BLSTB");  // Gustavia (Guadaloupe/St.Barts-BL)
    COPP_CLARK_ALTERATIONS.put("FIMHQ", "AXMHQ");  // Mariehamn (Finaland/Aland-AX)
    COPP_CLARK_ALTERATIONS.put("FMPNI", "FMFSM");  // Pohnpei (Micronesia)
    COPP_CLARK_ALTERATIONS.put("MSMNI", "MSMSR");  // Montserrat
  }
  /**
   * Copp Clark regions with no UN/LOCODE equivalent.
   */
  private static final Map<String, String> COPP_CLARK_ADDITIONS = new HashMap<>();
  static {
    COPP_CLARK_ADDITIONS.put("PSPSE", "West Bank");
    COPP_CLARK_ADDITIONS.put("LKMAT", "Matara");
    COPP_CLARK_ADDITIONS.put("ILJRU", "Jerusalem");
  }

}
| |
package com.owera.xaps.tr069;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.owera.common.db.ConnectionMetaData;
import com.owera.common.db.ConnectionPoolData;
import com.owera.common.db.ConnectionProperties;
import com.owera.common.db.ConnectionProvider;
import com.owera.common.util.Sleep;
import com.owera.xaps.Properties.Module;
import com.owera.xaps.base.BaseCache;
import com.owera.xaps.base.Log;
import com.owera.xaps.base.db.DBAccess;
import com.owera.xaps.base.http.Authenticator;
import com.owera.xaps.base.http.ThreadCounter;
import com.owera.xaps.dbi.ScriptExecutions;
import com.owera.xaps.dbi.SyslogConstants;
import com.owera.xaps.dbi.Unit;
import com.owera.xaps.dbi.XAPS;
import com.owera.xaps.dbi.XAPSUnit;
import com.owera.xaps.tr069.background.BackgroundProcesses;
import com.owera.xaps.tr069.background.ScheduledKickTask;
import com.owera.xaps.tr069.exception.TR069Exception;
import com.owera.xaps.tr069.exception.TR069ExceptionShortMessage;
import com.owera.xaps.tr069.methods.DecisionMaker;
import com.owera.xaps.tr069.methods.HTTPRequestProcessor;
import com.owera.xaps.tr069.methods.HTTPResponseCreator;
import com.owera.xaps.tr069.methods.TR069Method;
import com.owera.xaps.tr069.test.system1.TestDatabase;
import com.owera.xaps.tr069.test.system1.TestDatabaseObject;
import com.owera.xaps.tr069.test.system2.Util;
/**
* This is the "main-class" of TR069 Provisioning. It receives the HTTP-request
* from the CPE and returns an HTTP-response. The content of the request/reponse
* can be both TR-069 request/response.
*
* @author morten
*
*/
public class Provisioning extends HttpServlet {
private static final long serialVersionUID = -3020450686422484143L;
public static final String VERSION = "3.1.2";
// private static BackgroundProcesses backgroundProcesses = new BackgroundProcesses();
private static ScriptExecutions executions;
/**
* Starts background processes, initializes logging system
*/
static {
DBAccess.init(Module.TR069, SyslogConstants.FACILITY_TR069, VERSION);
com.owera.common.log.Log.initialize("xaps-tr069-logs.properties");
Log.notice(Provisioning.class, "Server starts...");
try {
BackgroundProcesses.initiate(DBAccess.getDBI());
} catch (Throwable t) {
Log.fatal(Provisioning.class, "Couldn't start BackgroundProcesses correctly ", t);
}
try {
executions = new ScriptExecutions(DBAccess.getXAPSProperties());
} catch (Throwable t) {
Log.fatal(Provisioning.class, "Couldn't initialize ScriptExecutions - not possible to run SHELL-jobs", t);
}
}
/**
* doGet prints some information about the server, focus on database connections and memory usage
*/
protected void doGet(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
if (req.getParameter("clearCache") != null)
BaseCache.clearCache();
PrintWriter pw = res.getWriter();
String html = "";
html += "<title>xAPS TR-069 Server Monitoring Page</title>";
html += "<h1>Monitoring of the TR-069 Server v. " + VERSION + "</h1>";
ConnectionProperties props = DBAccess.getXAPSProperties();
ConnectionPoolData poolData = ConnectionProvider.getConnectionPoolData(props);
if (poolData != null) {
html += "<h1>Database connection</h1>\n";
html += "This server is connected to " + poolData.getProps().getUrl() + " with user " + poolData.getProps().getUser() + "<br>\n";
ConnectionMetaData metaData = poolData.getMetaData().clone();
html += "<ul>Accessed : " + metaData.getAccessed() + "<br>\n";
html += "Retries : " + metaData.getRetries() + "<br>\n";
html += "Denied : " + metaData.getDenied() + "<br>\n";
html += "Denied % : " + metaData.calculateDeniedPercent() + "<br>\n";
html += "Free : " + poolData.getFreeConn().size() + "<br>\n";
html += "Currently used : " + poolData.getUsedConn().size() + "<br>\n";
html += "Used % : " + metaData.calculateUsedPercent() + "<br>\n<ul>";
int[] accessedSim = metaData.getAccessedSim();
for (int i = 1; i < accessedSim.length; i++) {
if (accessedSim[i] == 0 && accessedSim[i + 1] == 0 && accessedSim[i + 2] == 0)
break;
float percent = ((float) accessedSim[i] / metaData.getAccessed()) * 100f;
html += String.format("Used " + i + " connection(s) simultaneously: %8.5f", percent);
html += "% (accessed: " + accessedSim[i] + ")<br>\n";
}
html += "</ul>\n</ul><br>\n";
}
long total = Runtime.getRuntime().totalMemory();
long free = Runtime.getRuntime().freeMemory();
long used = total - free;
html += "<h1>Memory Usage</h1>\n";
html += "The JVM uses " + getBytesFormatted(used) + " of memory. " + getBytesFormatted(free) + " of memory available on the heap.<br>";
pw.print(html);
}
/**
* Reads the XML input into a string and store it in the SessionData object
* @param reqRes
* @return
* @throws TR069Exception
* @throws IOException
*/
private static long extractRequest(HTTPReqResData reqRes) throws TR069Exception {
try {
long tms = System.currentTimeMillis();
InputStreamReader isr = new InputStreamReader(reqRes.getReq().getInputStream());
BufferedReader br = new BufferedReader(isr);
StringBuilder requestSB = new StringBuilder(1000);
while (true) {
String line = br.readLine();
if (line == null)
break;
requestSB.append(line + "\n");
}
reqRes.getRequest().setXml(requestSB.toString());
return System.currentTimeMillis() - tms;
} catch (IOException e) {
throw new TR069Exception("TR-069 client aborted (not possible to read more input)", TR069ExceptionShortMessage.IOABORTED, e);
}
}
/**
* Some devices may send a CONTINUE header - server always reply "yes" - do continue
* @param req
* @param res
* @return
* @throws IOException
*/
@SuppressWarnings("unused")
private static boolean hasContinueHeader(HttpServletRequest req, HttpServletResponse res) throws IOException {
// Support 100 Continue header - always YES - CONTINUE!
if (req.getHeader("Expect") != null && req.getHeader("Expect").indexOf("100-continue") > -1) {
res.setStatus(HttpServletResponse.SC_CONTINUE);
res.getWriter().print("");
return true;
}
return false;
}
/**
* This is the entry point for TR-069 Clients - everything starts here!!!
*
* A TR-069 session consists of many rounds of HTTP request/responses, however
* each request/response non-the-less follows a standard pattern:
*
* 1. Check special HTTP headers for a "early return" (CONTINUE)
* 2. Check authentication - challenge client if necessary. If not authenticated - return
* 3. Check concurrent sessions from same unit - if detected: return
* 4. Extract XML from request - store in sessionData object
* 5. Process HTTP Request (xml-parsing, find methodname, test-verification)
* 6. Decide upon next step - may contain logic that processes the request and decide response
* 7. Produce HTTP Response (xml-creation)
* 8. Some details about the xml-response like content-type/Empty response
* 9. Return response to TR-069 client
*
* At the end we have error handling, to make sure that no matter what, we do return
* an EMTPY response to the client - to signal end of conversation/TR-069-session.
*
* In the finally loop we check if a TR-069 Session is in-fact completed (one way
* or the other) and if so, logging is performed. Also, if unit-parameters are queued
* up for writing, those will be written now (instead of writing some here and some there
* along the entire TR-069 session).
*
* In special cases the server will kick the device to "come back" and continue testing a new test case.
*
*/
protected void doPost(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
// 1. If HTTP CONTINUE header present, return "yes" and return - should be correct behavior - the client will return
// if (hasContinueHeader(req, res))
// return;
HTTPReqResData reqRes = null;
try {
// Create the main object which contains all objects concerning the entire
// session. This object also contains the SessionData object
reqRes = new HTTPReqResData(req, res);
// 2. Authenticate the client (first issue challenge, then authenticate)
if (!Authenticator.authenticate(reqRes))
return;
// 3. Do not continue if concurrent sessions from the same unit is on going
if (reqRes.getSessionData() != null && !ThreadCounter.isRequestAllowed(reqRes.getSessionData()))
return;
// 4. Read the request from the client - store in reqRes object
extractRequest(reqRes);
// 5.Process request (parsing xml/data)
HTTPRequestProcessor.processRequest(reqRes);
// 6. Decide next step in TR-069 session (sometimes trivial, sometimes complex)
DecisionMaker.process(reqRes);
// 7. Create TR-069 response
HTTPResponseCreator.createResponse(reqRes);
// 8. Set correct headers in response
if (reqRes.getResponse().getXml() != null && reqRes.getResponse().getXml().length() > 0) {
res.setHeader("SOAPAction", "");
res.setContentType("text/xml");
}
// 8. No need to send Content-length as it will only be informational for 204 HTTP messages
if (reqRes.getResponse().getMethod().equals("Empty"))
res.setStatus(HttpServletResponse.SC_NO_CONTENT);
// 9. Print response to output
res.getWriter().print(reqRes.getResponse().getXml());
} catch (Throwable t) {
// Make sure we return an EMPTY response to the TR-069 client
if (t instanceof TR069Exception) {
TR069Exception tex = (TR069Exception) t;
Throwable stacktraceThrowable = t;
if (tex.getCause() != null)
stacktraceThrowable = tex.getCause();
if (tex.getShortMsg() == TR069ExceptionShortMessage.MISC || tex.getShortMsg() == TR069ExceptionShortMessage.DATABASE)
Log.error(Provisioning.class, "An error ocurred: " + t.getMessage(), stacktraceThrowable);
if (tex.getShortMsg() == TR069ExceptionShortMessage.IOABORTED)
Log.warn(Provisioning.class, t.getMessage());
else
Log.error(Provisioning.class, t.getMessage()); // No stacktrace printed to log
}
if (reqRes != null)
reqRes.setThrowable(t);
res.setStatus(HttpServletResponse.SC_NO_CONTENT);
res.getWriter().print("");
} finally {
// Run at end of every TR-069 session
if (reqRes != null && endOfSession(reqRes)) {
Log.debug(Provisioning.class, "End of session is reached, will write queued unit parameters if unit (" + reqRes.getSessionData().getUnit() + ") is not null");
// Logging of the entire session, both to tr069-event.log and syslog
if (reqRes.getSessionData().getUnit() != null) {
// reqRes.getSessionData().getUnit().toWriteQueue(SystemParameters.PROVISIONING_STATE, ProvisioningState.READY.toString());
writeQueuedUnitParameters(reqRes);
}
SessionLogging.log(reqRes);
if (Util.testEnabled(reqRes, true))
initiateNewTestSession(reqRes);
else if (reqRes.getSessionData().isTestMode()) {
String row = TestDatabase.database.select(reqRes.getSessionData().getUnitId());
if (row != null && new TestDatabaseObject(row).getRun().equals("true"))
initiateNewTestSession(reqRes);
}
BaseCache.removeSessionData(reqRes.getSessionData().getUnitId());
BaseCache.removeSessionData(reqRes.getSessionData().getId());
res.setHeader("Connection", "close");
}
}
if (reqRes != null && reqRes.getSessionData() != null)
ThreadCounter.responseDelivered(reqRes.getSessionData());
}
private static void initiateNewTestSession(HTTPReqResData reqRes) {
try {
List<HTTPReqResData> reqResList = reqRes.getSessionData().getReqResList();
boolean deviceHasAlreadyBooted = false;
for (HTTPReqResData rr : reqResList) {
String method = rr.getResponse().getMethod();
// No need to kick device if a reboot or reset has been part of the test-flow
if (method != null && (method.equals(TR069Method.FACTORY_RESET) || method.equals(TR069Method.REBOOT)))
deviceHasAlreadyBooted = true;
}
if (!deviceHasAlreadyBooted) {
ScheduledKickTask.addUnit(reqRes.getSessionData().getUnit());
}
} catch (Throwable t) {
Log.warn(Provisioning.class, "Could not initiate kick after completed session in test mode", t);
}
}
private static void writeQueuedUnitParameters(HTTPReqResData reqRes) {
try {
Unit unit = reqRes.getSessionData().getUnit();
if (unit != null) {
XAPS xaps = reqRes.getSessionData().getDbAccess().getXaps();
XAPSUnit xapsUnit = DBAccess.getXAPSUnit(xaps);
xapsUnit.addOrChangeQueuedUnitParameters(unit);
}
} catch (Throwable t) {
Log.error(Provisioning.class, "An error occured when writing queued unit parameters to Fusion. May affect provisioning", t);
}
}
private static boolean endOfSession(HTTPReqResData reqRes) {
try {
SessionData sessionData = reqRes.getSessionData();
HTTPReqData reqData = reqRes.getRequest();
HTTPResData resData = reqRes.getResponse();
if (reqRes.getThrowable() != null)
return true;
if (reqData.getMethod() != null && resData != null && resData.getMethod().equals(TR069Method.EMPTY)) {
boolean terminationQuirk = Properties.isTerminationQuirk(sessionData);
if (terminationQuirk && reqData.getMethod().equals(TR069Method.EMPTY))
return true;
if (!terminationQuirk)
return true;
}
return false;
} catch (Throwable t) {
Log.warn(Provisioning.class, "An error occured when determining endOfSession. Does not affect provisioning", t);
return false;
}
}
private static String getBytesFormatted(long bytes) {
if (bytes > 1024 * 1024 * 1024)
return bytes / (1024 * 1024 * 1024) + " GB";
else if (bytes > 1024 * 1024)
return bytes / (1024 * 1024) + " MB";
else if (bytes > 1024)
return bytes / (1024) + " KB";
return bytes + " B";
}
public void destroy() {
Sleep.terminateApplication();
}
public static ScriptExecutions getExecutions() {
return executions;
}
}
| |
package org.apache.cassandra.db;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import org.apache.cassandra.cql3.QueryProcessor;
import org.apache.cassandra.exceptions.RequestExecutionException;
import org.apache.cassandra.utils.UUIDGen;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.apache.cassandra.OrderedJUnit4ClassRunner;
import org.apache.cassandra.cql3.UntypedResultSet;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.columniterator.IdentityQueryFilter;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.compaction.Scrubber;
import org.apache.cassandra.exceptions.WriteTimeoutException;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.utils.ByteBufferUtil;
import static org.apache.cassandra.Util.cellname;
import static org.apache.cassandra.Util.column;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
@RunWith(OrderedJUnit4ClassRunner.class)
public class ScrubTest extends SchemaLoader
{
    public String KEYSPACE = "Keyspace1";
    public String CF = "Standard1";
    public String CF3 = "Standard2";
    public String COUNTER_CF = "Counter1";

    /**
     * Scrubbing a single intact row must leave it readable.
     */
    @Test
    public void testScrubOneRow() throws ExecutionException, InterruptedException
    {
        CompactionManager.instance.disableAutoCompaction();
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF);
        cfs.clearUnsafe();

        List<Row> rows;

        // insert data and verify we get it back w/ range query
        fillCF(cfs, 1);
        rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(1, rows.size());

        CompactionManager.instance.performScrub(cfs, false);

        // check data is still there
        rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(1, rows.size());
    }

    /**
     * A deliberately corrupted counter row must fail scrub with skipCorrupted == false,
     * and be silently dropped with skipCorrupted == true.
     */
    @Test
    public void testScrubCorruptedCounterRow() throws IOException, WriteTimeoutException
    {
        CompactionManager.instance.disableAutoCompaction();
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(COUNTER_CF);
        cfs.clearUnsafe();

        fillCounterCF(cfs, 2);

        List<Row> rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(2, rows.size());

        SSTableReader sstable = cfs.getSSTables().iterator().next();

        // overwrite one row with garbage
        long row0Start = sstable.getPosition(RowPosition.ForKey.get(ByteBufferUtil.bytes("0"), sstable.partitioner), SSTableReader.Operator.EQ).position;
        long row1Start = sstable.getPosition(RowPosition.ForKey.get(ByteBufferUtil.bytes("1"), sstable.partitioner), SSTableReader.Operator.EQ).position;
        long startPosition = row0Start < row1Start ? row0Start : row1Start;
        long endPosition = row0Start < row1Start ? row1Start : row0Start;

        // try-with-resources ensures the file is closed even if seek/write throws
        try (RandomAccessFile file = new RandomAccessFile(sstable.getFilename(), "rw"))
        {
            file.seek(startPosition);
            file.writeBytes(StringUtils.repeat('z', (int) (endPosition - startPosition)));
        }

        // with skipCorrupted == false, the scrub is expected to fail
        Scrubber scrubber = new Scrubber(cfs, sstable, false, false);
        try
        {
            scrubber.scrub();
            fail("Expected a CorruptSSTableException to be thrown");
        }
        catch (IOError err) { /* expected: corruption detected */ }
        finally
        {
            // release the scrubber's file handles even on the failure path
            scrubber.close();
        }

        // with skipCorrupted == true, the corrupt row will be skipped
        scrubber = new Scrubber(cfs, sstable, true, false);
        scrubber.scrub();
        scrubber.close();
        assertEquals(1, cfs.getSSTables().size());

        // verify that we can read all of the rows, and there is now one less row
        rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(1, rows.size());
    }

    /**
     * An sstable containing only an expired tombstone must be purged entirely by scrub.
     */
    @Test
    public void testScrubDeletedRow() throws ExecutionException, InterruptedException
    {
        CompactionManager.instance.disableAutoCompaction();
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF3);
        cfs.clearUnsafe();

        ColumnFamily cf = ArrayBackedSortedColumns.factory.create(KEYSPACE, CF3);
        cf.delete(new DeletionInfo(0, 1)); // expired tombstone
        Mutation rm = new Mutation(KEYSPACE, ByteBufferUtil.bytes(1), cf);
        rm.applyUnsafe();
        cfs.forceBlockingFlush();

        CompactionManager.instance.performScrub(cfs, false);
        assert cfs.getSSTables().isEmpty();
    }

    /**
     * Scrubbing multiple intact rows must leave all of them readable.
     */
    @Test
    public void testScrubMultiRow() throws ExecutionException, InterruptedException
    {
        CompactionManager.instance.disableAutoCompaction();
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(CF);
        cfs.clearUnsafe();

        List<Row> rows;

        // insert data and verify we get it back w/ range query
        fillCF(cfs, 10);
        rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(10, rows.size());

        CompactionManager.instance.performScrub(cfs, false);

        // check data is still there
        rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assertEquals(10, rows.size());
    }

    /**
     * Scrub of a pre-built out-of-order sstable (supplied via the
     * "corrupt-sstable-root" system property) must reorder the rows.
     */
    @Test
    public void testScrubOutOfOrder() throws Exception
    {
        CompactionManager.instance.disableAutoCompaction();
        Keyspace keyspace = Keyspace.open(KEYSPACE);
        String columnFamily = "Standard3";
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(columnFamily);
        cfs.clearUnsafe();

        /*
         * Code used to generate an outOfOrder sstable. The test for out-of-order key in SSTableWriter must also be commented out.
         * The test also assumes an ordered partitioner.
         *
        ColumnFamily cf = ArrayBackedSortedColumns.factory.create(cfs.metadata);
        cf.addColumn(new Cell(ByteBufferUtil.bytes("someName"), ByteBufferUtil.bytes("someValue"), 0L));

        SSTableWriter writer = new SSTableWriter(cfs.getTempSSTablePath(new File(System.getProperty("corrupt-sstable-root"))),
                                                 cfs.metadata.getIndexInterval(),
                                                 cfs.metadata,
                                                 cfs.partitioner,
                                                 SSTableMetadata.createCollector(BytesType.instance));
        writer.append(Util.dk("a"), cf);
        writer.append(Util.dk("b"), cf);
        writer.append(Util.dk("z"), cf);
        writer.append(Util.dk("c"), cf);
        writer.append(Util.dk("y"), cf);
        writer.append(Util.dk("d"), cf);
        writer.closeAndOpenReader();
        */

        String root = System.getProperty("corrupt-sstable-root");
        assert root != null;
        File rootDir = new File(root);
        assert rootDir.isDirectory();
        Descriptor desc = new Descriptor(new Descriptor.Version("jb"), rootDir, KEYSPACE, columnFamily, 1, Descriptor.Type.FINAL);
        CFMetaData metadata = Schema.instance.getCFMetaData(desc.ksname, desc.cfname);

        try
        {
            SSTableReader.open(desc, metadata);
            fail("SSTR validation should have caught the out-of-order rows");
        }
        catch (IllegalStateException ise) { /* this is expected */ }

        // open without validation for scrubbing
        Set<Component> components = new HashSet<>();
        components.add(Component.COMPRESSION_INFO);
        components.add(Component.DATA);
        components.add(Component.PRIMARY_INDEX);
        components.add(Component.FILTER);
        components.add(Component.STATS);
        components.add(Component.SUMMARY);
        components.add(Component.TOC);
        SSTableReader sstable = SSTableReader.openNoValidation(desc, components, metadata);

        Scrubber scrubber = new Scrubber(cfs, sstable, false, true);
        scrubber.scrub();

        cfs.loadNewSSTables();
        List<Row> rows = cfs.getRangeSlice(Util.range("", ""), null, new IdentityQueryFilter(), 1000);
        assert isRowOrdered(rows) : "Scrub failed: " + rows;
        assert rows.size() == 6 : "Got " + rows.size();
    }

    /**
     * Returns true if the rows are sorted by decorated key.
     */
    private static boolean isRowOrdered(List<Row> rows)
    {
        DecoratedKey prev = null;
        for (Row row : rows)
        {
            if (prev != null && prev.compareTo(row.key) > 0)
                return false;
            prev = row.key;
        }
        return true;
    }

    /**
     * Writes rowsPerSSTable rows with two regular columns each and flushes to disk.
     */
    protected void fillCF(ColumnFamilyStore cfs, int rowsPerSSTable)
    {
        for (int i = 0; i < rowsPerSSTable; i++)
        {
            String key = String.valueOf(i);
            // create a row and update the birthdate value, test that the index query fetches the new version
            ColumnFamily cf = ArrayBackedSortedColumns.factory.create(KEYSPACE, CF);
            cf.addColumn(column("c1", "1", 1L));
            cf.addColumn(column("c2", "2", 1L));
            Mutation rm = new Mutation(KEYSPACE, ByteBufferUtil.bytes(key), cf);
            rm.applyUnsafe();
        }

        cfs.forceBlockingFlush();
    }

    /**
     * Writes rowsPerSSTable counter rows and flushes to disk.
     */
    protected void fillCounterCF(ColumnFamilyStore cfs, int rowsPerSSTable) throws WriteTimeoutException
    {
        for (int i = 0; i < rowsPerSSTable; i++)
        {
            String key = String.valueOf(i);
            ColumnFamily cf = ArrayBackedSortedColumns.factory.create(KEYSPACE, COUNTER_CF);
            Mutation rm = new Mutation(KEYSPACE, ByteBufferUtil.bytes(key), cf);
            rm.addCounter(COUNTER_CF, cellname("Column1"), 100);
            CounterMutation cm = new CounterMutation(rm, ConsistencyLevel.ONE);
            cm.apply();
        }

        cfs.forceBlockingFlush();
    }

    /**
     * Scrub must validate static-column CQL3 tables without error.
     */
    @Test
    public void testScrubColumnValidation() throws InterruptedException, RequestExecutionException, ExecutionException
    {
        QueryProcessor.process("CREATE TABLE \"Keyspace1\".test_compact_static_columns (a bigint, b timeuuid, c boolean static, d text, PRIMARY KEY (a, b))", ConsistencyLevel.ONE);

        Keyspace keyspace = Keyspace.open("Keyspace1");
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("test_compact_static_columns");

        QueryProcessor.executeInternal("INSERT INTO \"Keyspace1\".test_compact_static_columns (a, b, c, d) VALUES (123, c3db07e8-b602-11e3-bc6b-e0b9a54a6d93, true, 'foobar')");
        cfs.forceBlockingFlush();
        CompactionManager.instance.performScrub(cfs, false);
    }

    /**
     * Tests CASSANDRA-6892 (key aliases being used improperly for validation)
     */
    @Test
    public void testColumnNameEqualToDefaultKeyAlias() throws ExecutionException, InterruptedException
    {
        Keyspace keyspace = Keyspace.open("Keyspace1");
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("UUIDKeys");

        ColumnFamily cf = ArrayBackedSortedColumns.factory.create("Keyspace1", "UUIDKeys");
        cf.addColumn(column(CFMetaData.DEFAULT_KEY_ALIAS, "not a uuid", 1L));
        Mutation mutation = new Mutation("Keyspace1", ByteBufferUtil.bytes(UUIDGen.getTimeUUID()), cf);
        mutation.applyUnsafe();
        cfs.forceBlockingFlush();
        CompactionManager.instance.performScrub(cfs, false);

        assertEquals(1, cfs.getSSTables().size());
    }

    /**
     * For CASSANDRA-6892 too, check that for a compact table with one cluster column, we can insert whatever
     * we want as value for the clustering column, including something that would conflict with a CQL column definition.
     */
    @Test
    public void testValidationCompactStorage() throws Exception
    {
        QueryProcessor.process("CREATE TABLE \"Keyspace1\".test_compact_dynamic_columns (a int, b text, c text, PRIMARY KEY (a, b)) WITH COMPACT STORAGE", ConsistencyLevel.ONE);

        Keyspace keyspace = Keyspace.open("Keyspace1");
        ColumnFamilyStore cfs = keyspace.getColumnFamilyStore("test_compact_dynamic_columns");

        QueryProcessor.executeInternal("INSERT INTO \"Keyspace1\".test_compact_dynamic_columns (a, b, c) VALUES (0, 'a', 'foo')");
        QueryProcessor.executeInternal("INSERT INTO \"Keyspace1\".test_compact_dynamic_columns (a, b, c) VALUES (0, 'b', 'bar')");
        QueryProcessor.executeInternal("INSERT INTO \"Keyspace1\".test_compact_dynamic_columns (a, b, c) VALUES (0, 'c', 'boo')");
        cfs.forceBlockingFlush();
        CompactionManager.instance.performScrub(cfs, true);

        // Scrub is silent, but it will remove broken records. So reading everything back to make sure nothing to "scrubbed away"
        UntypedResultSet rs = QueryProcessor.executeInternal("SELECT * FROM \"Keyspace1\".test_compact_dynamic_columns");
        assertEquals(3, rs.size());
        Iterator<UntypedResultSet.Row> iter = rs.iterator();
        assertEquals("foo", iter.next().getString("c"));
        assertEquals("bar", iter.next().getString("c"));
        assertEquals("boo", iter.next().getString("c"));
    }
}
| |
/*
* Copyright (c) 2019, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.developerstudio.eclipse.esb.synapse.unit.test.wizard.inputproperty;
import org.apache.commons.lang3.text.WordUtils;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Font;
import org.eclipse.swt.graphics.FontData;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.model.InputProperty;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.model.SynapseUnitTest;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.model.TestCase;
import org.wso2.developerstudio.eclipse.esb.synapse.unit.test.utils.ComboItems;
/**
 * Wizard page used to add or update a single input property (message-context,
 * axis2 or transport scope) of a Synapse unit test case.
 */
public class InputPropertyDetailPage extends WizardPage {

    private static final String DIALOG_TITLE = "Synapse Unit Test - Input Property Details";
    private static final String DIALOG_DESCRIPTION = "Enter Message Context, Axis2 or Transport level properties ";
    private static final String EMPTY_STRING = "";
    private static final String PROPERTY_SCOPE = "Property Scope:";
    private static final String PROPERTY_NAME = "Property Name:";
    private static final String PROPERTY_VALUE = "Property Value:";

    // Current form values; the widget listeners below keep these in sync.
    private String propertyName = EMPTY_STRING;
    private String propertyValue = EMPTY_STRING;
    private String propertyScope = EMPTY_STRING;

    private Text txtPropertyName;
    private Text txtPropertyValue;
    private boolean isPageDirty;
    private SynapseUnitTest synapseTestDataHolder;

    /**
     * Class constructor. When the wizard was opened to update an existing
     * property, that property's data is pre-loaded into the page fields.
     */
    protected InputPropertyDetailPage(IWorkbench wb, IStructuredSelection selection, SynapseUnitTest synapseTestData) {
        super(DIALOG_TITLE);
        setTitle(DIALOG_TITLE);
        setDescription(DIALOG_DESCRIPTION);
        this.synapseTestDataHolder = synapseTestData;
        // check page loading for update status
        if (synapseTestData.isPropertyUpdate()) {
            String propertyKey = synapseTestData.getSelectedPropertyKey();
            InputProperty inputProperty;
            TestCase selectedTestCase = null;
            if (synapseTestData.isTestCaseUpdate()) {
                int testCaseKey = synapseTestData.getSelectedTestCaseKey();
                selectedTestCase = synapseTestData.getSelectedTestCase(testCaseKey);
            }
            // Prefer the property stored on the selected test case; otherwise fall
            // back to the temporal (not yet saved) property map.
            if (selectedTestCase != null && selectedTestCase.getPropertyMap().containsKey(propertyKey)) {
                inputProperty = selectedTestCase.getSelectedProperty(propertyKey);
            } else {
                inputProperty = synapseTestData.getSelectedTemporalProperty(propertyKey);
            }
            fillUpdatingPropertyData(inputProperty);
        }
    }

    @Override
    public void createControl(Composite parent) {
        Composite container = new Composite(parent, SWT.NULL);
        setControl(container);
        container.setLayout(new FormLayout());
        FormData data;

        // --- property scope label + combo ---
        Label lblPropertyScope = new Label(container, SWT.NONE);
        data = new FormData();
        data.top = new FormAttachment(10);
        data.left = new FormAttachment(3);
        data.width = 160;
        lblPropertyScope.setLayoutData(data);
        lblPropertyScope.setText(PROPERTY_SCOPE);

        final String[] PROP_LEVELS = ComboItems.getPropItems(synapseTestDataHolder.getTestArtifactType());
        final String[] PROP_NAME_ITEMS = ComboItems.getPropItemNames(synapseTestDataHolder.getTestArtifactType());
        Combo combo = new Combo(container, SWT.READ_ONLY | SWT.DROP_DOWN);
        data = new FormData();
        data.top = new FormAttachment(10);
        data.left = new FormAttachment(lblPropertyScope, 0);
        data.right = new FormAttachment(97);
        data.width = 400;
        combo.setItems(PROP_LEVELS);
        combo.setLayoutData(data);
        if (!getPropertyScope().isEmpty()) {
            // Updating an existing property: restore the previously chosen scope.
            combo.setText(WordUtils.capitalizeFully(getPropertyScope()));
        } else {
            combo.setText(PROP_LEVELS[0]);
            setPropertyScope(PROP_LEVELS[0]);
        }

        // --- property name label, scope prefix label and text box ---
        Label lblPropertyName = new Label(container, SWT.NONE);
        data = new FormData();
        data.top = new FormAttachment(combo, 12);
        data.left = new FormAttachment(3);
        data.width = 160;
        lblPropertyName.setLayoutData(data);
        lblPropertyName.setText(PROPERTY_NAME);

        Label lblPropertyPrefixName = new Label(container, SWT.NONE);
        data = new FormData();
        data.top = new FormAttachment(combo, 12);
        data.left = new FormAttachment(lblPropertyName, 3);
        data.width = 46;
        data.height = 21;
        lblPropertyPrefixName.setLayoutData(data);
        lblPropertyPrefixName.setText(" " + PROP_NAME_ITEMS[0]);
        Color prefixBackground = new Color(null, 229, 236, 253);
        lblPropertyPrefixName.setBackground(prefixBackground);
        Font boldFont = new Font(null, new FontData("Arial", 12, SWT.BOLD));
        lblPropertyPrefixName.setFont(boldFont);
        // SWT fonts and colors are OS resources and must be released explicitly;
        // dispose them together with this page's widget tree.
        container.addDisposeListener(e -> {
            boldFont.dispose();
            prefixBackground.dispose();
        });

        txtPropertyName = new Text(container, SWT.BORDER);
        data = new FormData();
        data.top = new FormAttachment(combo, 12);
        data.left = new FormAttachment(lblPropertyPrefixName, 0);
        data.right = new FormAttachment(97);
        data.width = 400;
        txtPropertyName.setLayoutData(data);
        if (!getPropertyName().isEmpty()) {
            txtPropertyName.setText(getPropertyName());
        }

        combo.addSelectionListener(new SelectionListener() {
            public void widgetSelected(SelectionEvent e) {
                setPropertyScope(combo.getText());
                validate();
                // Show the name prefix matching the newly selected scope.
                if (getPropertyScope().equals(PROP_LEVELS[0])) {
                    lblPropertyPrefixName.setText(" " + PROP_NAME_ITEMS[0]);
                } else if (getPropertyScope().equals(PROP_LEVELS[1])) {
                    lblPropertyPrefixName.setText(" " + PROP_NAME_ITEMS[1]);
                } else if (getPropertyScope().equals(PROP_LEVELS[2])) {
                    lblPropertyPrefixName.setText(" " + PROP_NAME_ITEMS[2]);
                }
            }

            public void widgetDefaultSelected(SelectionEvent e) {
            }
        });
        txtPropertyName.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent arg0) {
                setPropertyName(txtPropertyName.getText());
                validate();
            }
        });

        // --- property value label and text box ---
        Label lblPropertyValue = new Label(container, SWT.NONE);
        data = new FormData();
        data.top = new FormAttachment(lblPropertyName, 20);
        data.left = new FormAttachment(3);
        data.width = 160;
        lblPropertyValue.setLayoutData(data);
        lblPropertyValue.setText(PROPERTY_VALUE);

        txtPropertyValue = new Text(container, SWT.BORDER);
        data = new FormData();
        data.top = new FormAttachment(lblPropertyName, 20);
        data.left = new FormAttachment(lblPropertyValue, 0);
        data.right = new FormAttachment(97);
        data.width = 400;
        // Note: the original set this layout data twice; once is sufficient.
        txtPropertyValue.setLayoutData(data);
        if (!getPropertyValue().isEmpty()) {
            txtPropertyValue.setText(getPropertyValue());
        }
        txtPropertyValue.addModifyListener(new ModifyListener() {
            public void modifyText(ModifyEvent arg0) {
                setPropertyValue(txtPropertyValue.getText());
                validate();
            }
        });

        // A property must be fully specified before the page can complete.
        setPageComplete(false);
    }

    /**
     * Method for validations of wizard page. If validation fails set page as dirty
     * and not complete.
     */
    private void validate() {
        if (getPropertyScope() == null || getPropertyScope().isEmpty()) {
            setErrorMessage("Please specify a scope for the property");
            setPageComplete(false);
            setPageDirty(true);
            return;
        } else if (getPropertyName() == null || getPropertyName().isEmpty()) {
            setErrorMessage("Please specify a name for the property");
            setPageComplete(false);
            setPageDirty(true);
            return;
        } else if (getPropertyValue() == null || getPropertyValue().isEmpty()) {
            setErrorMessage("Please specify a value for the property");
            setPageComplete(false);
            setPageDirty(true);
            return;
        }
        setPageDirty(false);
        setErrorMessage(null);
        setPageComplete(true);
    }

    /**
     * Method for filling updating data to wizard page.
     *
     * @param inputProperty input-property data
     */
    private void fillUpdatingPropertyData(InputProperty inputProperty) {
        setPropertyName(inputProperty.getName());
        setPropertyScope(inputProperty.getScope());
        setPropertyValue(inputProperty.getValue());
    }

    public String getPropertyName() {
        return propertyName;
    }

    public void setPropertyName(String propertyName) {
        this.propertyName = propertyName;
    }

    public String getPropertyValue() {
        return propertyValue;
    }

    public void setPropertyValue(String propertyValue) {
        this.propertyValue = propertyValue;
    }

    public String getPropertyScope() {
        return propertyScope;
    }

    public void setPropertyScope(String propertyScope) {
        this.propertyScope = propertyScope;
    }

    public boolean isPageDirty() {
        return isPageDirty;
    }

    public void setPageDirty(boolean isPageDirty) {
        this.isPageDirty = isPageDirty;
    }
}
| |
package org.sagebionetworks.repo.web.service;
import java.io.IOException;
import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.sagebionetworks.ids.IdGenerator;
import org.sagebionetworks.repo.manager.SemaphoreManager;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.manager.doi.DoiAdminManager;
import org.sagebionetworks.repo.manager.feature.FeatureManager;
import org.sagebionetworks.repo.manager.message.MessageSyndication;
import org.sagebionetworks.repo.manager.password.PasswordValidator;
import org.sagebionetworks.repo.manager.stack.StackStatusManager;
import org.sagebionetworks.repo.manager.table.TableManagerSupport;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.EntityId;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.UnauthorizedException;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.auth.NewIntegrationTestUser;
import org.sagebionetworks.repo.model.auth.NewUser;
import org.sagebionetworks.repo.model.dbo.dao.DBOChangeDAO;
import org.sagebionetworks.repo.model.dbo.persistence.DBOCredential;
import org.sagebionetworks.repo.model.dbo.persistence.DBOSessionToken;
import org.sagebionetworks.repo.model.dbo.persistence.DBOTermsOfUseAgreement;
import org.sagebionetworks.repo.model.entity.IdAndVersion;
import org.sagebionetworks.repo.model.feature.Feature;
import org.sagebionetworks.repo.model.feature.FeatureStatus;
import org.sagebionetworks.repo.model.message.ChangeMessages;
import org.sagebionetworks.repo.model.message.FireMessagesResult;
import org.sagebionetworks.repo.model.message.PublishResults;
import org.sagebionetworks.repo.model.message.TransactionSynchronizationProxy;
import org.sagebionetworks.repo.model.migration.IdGeneratorExport;
import org.sagebionetworks.repo.model.status.StackStatus;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.repo.web.controller.ObjectTypeSerializer;
import org.sagebionetworks.securitytools.PBKDF2Utils;
import org.sagebionetworks.util.ValidateArgument;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
/**
* This controller is used for Administration of Synapse.
*
* @author John
*/
/**
 * This controller is used for Administration of Synapse.
 *
 * @author John
 */
public class AdministrationServiceImpl implements AdministrationService {

    // Constant logger instance; private static final per convention.
    private static final Logger log = LogManager.getLogger(AdministrationServiceImpl.class);

    @Autowired
    private ObjectTypeSerializer objectTypeSerializer;
    @Autowired
    private UserManager userManager;
    @Autowired
    private StackStatusManager stackStatusManager;
    @Autowired
    private MessageSyndication messageSyndication;
    @Autowired
    private DoiAdminManager doiAdminManager;
    @Autowired
    SemaphoreManager semaphoreManager;
    @Autowired
    TableManagerSupport tableManagerSupport;
    @Autowired
    private DBOChangeDAO changeDAO;
    @Autowired
    IdGenerator idGenerator;
    @Autowired
    TransactionSynchronizationProxy transactionSynchronizationManager;
    @Autowired
    PasswordValidator passwordValidator;
    @Autowired
    FeatureManager featureManager;

    /**
     * @return the current status of the stack.
     */
    @Override
    public StackStatus getStackStatus() {
        // Get the status of this daemon
        return stackStatusManager.getCurrentStatus();
    }

    /**
     * Deserializes the new status from the request body and applies it on behalf
     * of the given user.
     */
    @Override
    public StackStatus updateStatusStackStatus(Long userId,
            HttpHeaders header, HttpServletRequest request)
            throws DatastoreException, NotFoundException, UnauthorizedException, IOException {
        // Get the status of this daemon
        StackStatus updatedValue = objectTypeSerializer.deserialize(request.getInputStream(), header, StackStatus.class, header.getContentType());
        // Get the user
        UserInfo userInfo = userManager.getUserInfo(userId);
        return stackStatusManager.updateStatus(userInfo, updatedValue);
    }

    /** Lists change messages starting at the given change number (admin only). */
    @Override
    public ChangeMessages listChangeMessages(Long userId, Long startChangeNumber, ObjectType type, Long limit) throws DatastoreException, NotFoundException {
        adminCheck(userId);
        return messageSyndication.listChanges(startChangeNumber, type, limit);
    }

    /** Rebroadcasts change messages to the named queue (admin only). */
    @Override
    public PublishResults rebroadcastChangeMessagesToQueue(Long userId, String queueName, Long startChangeNumber, ObjectType type, Long limit) throws DatastoreException, NotFoundException {
        adminCheck(userId);
        return messageSyndication.rebroadcastChangeMessagesToQueue(queueName, type, startChangeNumber, limit);
    }

    /** Re-fires change messages and reports the next change number (admin only). */
    @Override
    public FireMessagesResult reFireChangeMessages(Long userId, Long startChangeNumber, Long limit) throws DatastoreException, NotFoundException {
        adminCheck(userId);
        long lastMsgNum = messageSyndication.rebroadcastChangeMessages(startChangeNumber, limit);
        FireMessagesResult res = new FireMessagesResult();
        res.setNextChangeNumber(lastMsgNum);
        return res;
    }

    /**
     * Guard shared by the admin-only endpoints.
     *
     * @throws UnauthorizedException if the given user is not an administrator.
     */
    void adminCheck(Long userId) {
        ValidateArgument.required(userId, "userid");
        UserInfo userInfo = userManager.getUserInfo(userId);
        if (!userInfo.isAdmin()) {
            throw new UnauthorizedException("Only an administrator may access this service.");
        }
    }

    @Override
    public void clearDoi(Long userId) throws NotFoundException, UnauthorizedException, DatastoreException {
        // Authorization is enforced by the DOI admin manager itself.
        doiAdminManager.clear(userId);
    }

    /** Returns the current (latest) change number (admin only). */
    @Override
    public FireMessagesResult getCurrentChangeNumber(Long userId) throws DatastoreException, NotFoundException {
        adminCheck(userId);
        long lastChgNum = messageSyndication.getCurrentChangeNumber();
        FireMessagesResult res = new FireMessagesResult();
        res.setNextChangeNumber(lastChgNum);
        return res;
    }

    /**
     * Creates (or returns an existing) integration-test user with the optional
     * password and session state supplied in {@code userSpecs} (admin only).
     */
    @Override
    public EntityId createOrGetTestUser(Long userId, NewIntegrationTestUser userSpecs) throws NotFoundException {
        adminCheck(userId);
        UserInfo userInfo = userManager.getUserInfo(userId);
        DBOCredential cred = new DBOCredential();
        DBOTermsOfUseAgreement touAgreement = null;
        DBOSessionToken token = null;
        if (userSpecs.getPassword() != null) {
            // Enforce password policy before hashing; null salt -> a random salt.
            passwordValidator.validatePassword(userSpecs.getPassword());
            cred.setPassHash(PBKDF2Utils.hashPassword(userSpecs.getPassword(), null));
        }
        if (userSpecs.getSession() != null) {
            Date date = new Date();
            touAgreement = new DBOTermsOfUseAgreement();
            touAgreement.setAgreesToTermsOfUse(userSpecs.getSession().getAcceptsTermsOfUse());
            token = new DBOSessionToken();
            token.setSessionToken(userSpecs.getSession().getSessionToken());
            token.setValidatedOn(date);
        }
        NewUser nu = new NewUser();
        nu.setEmail(userSpecs.getEmail());
        nu.setUserName(userSpecs.getUsername());
        UserInfo user = userManager.createOrGetTestUser(userInfo, nu, cred, touAgreement, token);
        EntityId id = new EntityId();
        id.setId(user.getId().toString());
        return id;
    }

    @Override
    public void deleteUser(Long userId, String id) throws NotFoundException {
        // Authorization is enforced inside deletePrincipal.
        UserInfo userInfo = userManager.getUserInfo(userId);
        userManager.deletePrincipal(userInfo, Long.parseLong(id));
    }

    @Override
    public void rebuildTable(Long userId, String tableId) throws NotFoundException, IOException {
        UserInfo userInfo = userManager.getUserInfo(userId);
        IdAndVersion idAndVersion = IdAndVersion.parse(tableId);
        tableManagerSupport.rebuildTable(userInfo, idAndVersion);
    }

    @Override
    public void clearAllLocks(Long userId) throws NotFoundException {
        UserInfo userInfo = userManager.getUserInfo(userId);
        // Only an admin can make this call
        semaphoreManager.releaseAllLocksAsAdmin(userInfo);
    }

    /** Creates or replaces the given batch of change messages (admin only). */
    @Override
    public ChangeMessages createOrUpdateChangeMessages(Long userId,
            ChangeMessages batch) throws UnauthorizedException, NotFoundException {
        adminCheck(userId);
        ChangeMessages messages = new ChangeMessages();
        messages.setList(changeDAO.replaceChange(batch.getList()));
        return messages;
    }

    /** Exports a script that can restore the ID generator state (admin only). */
    @Override
    public IdGeneratorExport createIdGeneratorExport(Long userId) {
        adminCheck(userId);
        String script = idGenerator.createRestoreScript();
        IdGeneratorExport export = new IdGeneratorExport();
        export.setExportScript(script);
        return export;
    }

    @Override
    public FeatureStatus getFeatureStatus(Long userId, Feature feature) {
        UserInfo user = userManager.getUserInfo(userId);
        return featureManager.getFeatureStatus(user, feature);
    }

    @Override
    public FeatureStatus setFeatureStatus(Long userId, Feature feature, FeatureStatus status) {
        UserInfo user = userManager.getUserInfo(userId);
        return featureManager.setFeatureStatus(user, feature, status);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jdbi.v3.core;
import java.lang.reflect.Modifier;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Objects;
import java.util.Properties;
import java.util.ServiceLoader;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicReference;
import javax.sql.DataSource;
import org.jdbi.v3.core.config.ConfigRegistry;
import org.jdbi.v3.core.config.Configurable;
import org.jdbi.v3.core.extension.ExtensionCallback;
import org.jdbi.v3.core.extension.ExtensionConsumer;
import org.jdbi.v3.core.extension.ExtensionFactory;
import org.jdbi.v3.core.extension.Extensions;
import org.jdbi.v3.core.extension.NoSuchExtensionException;
import org.jdbi.v3.core.spi.JdbiPlugin;
import org.jdbi.v3.core.statement.DefaultStatementBuilderFactory;
import org.jdbi.v3.core.statement.StatementBuilder;
import org.jdbi.v3.core.statement.StatementBuilderFactory;
import org.jdbi.v3.core.transaction.LocalTransactionHandler;
import org.jdbi.v3.core.transaction.TransactionHandler;
import org.jdbi.v3.core.transaction.TransactionIsolationLevel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class provides the access point for jDBI. Use it to obtain Handle instances
* and provide "global" configuration for all handles obtained from it.
*/
public class Jdbi implements Configurable<Jdbi>
{
// Logger shared by all Jdbi instances.
private static final Logger LOG = LoggerFactory.getLogger(Jdbi.class);
// "Global" configuration; every opened Handle receives its own copy (see open()).
private final ConfigRegistry config = new ConfigRegistry();
// Source of JDBC connections for every Handle opened from this Jdbi.
private final ConnectionFactory connectionFactory;
// Thread-safe holders so the handler/factory can be replaced after construction.
private final AtomicReference<TransactionHandler> transactionhandler = new AtomicReference<>(new LocalTransactionHandler());
private final AtomicReference<StatementBuilderFactory> statementBuilderFactory = new AtomicReference<>(new DefaultStatementBuilderFactory());
// Plugins applied, in install order, to every connection and handle (see open()).
private final CopyOnWriteArrayList<JdbiPlugin> plugins = new CopyOnWriteArrayList<>();
// Instances are obtained through the static create(...) factories.
private Jdbi(ConnectionFactory connectionFactory)
{
Objects.requireNonNull(connectionFactory, "null connectionFactory");
this.connectionFactory = connectionFactory;
}
/**
 * @param dataSource the data source.
 *
 * @return a DBI which uses the given data source as a connection factory.
 */
public static Jdbi create(DataSource dataSource)
{
    // Fail fast with a descriptive message, consistent with the other factories,
    // instead of the bare NPE that evaluating the method reference would throw.
    Objects.requireNonNull(dataSource, "null dataSource");
    return create(dataSource::getConnection);
}
/**
 * Factory used to allow for obtaining a Connection in a customized manner.
 *
 * <p>
 * The {@link ConnectionFactory#openConnection()} method is invoked to obtain a connection
 * every time a Handle is opened.
 * </p>
 *
 * @param connectionFactory Provides JDBC connections to Handle instances
 *
 * @return a DBI which uses the given connection factory.
 */
public static Jdbi create(ConnectionFactory connectionFactory)
{
    // Null validation happens in the private constructor.
    return new Jdbi(connectionFactory);
}
/**
 * @param url JDBC URL for connections
 *
 * @return a DBI which uses {@link DriverManager} as a connection factory.
 */
public static Jdbi create(final String url) {
    Objects.requireNonNull(url, "null url");
    ConnectionFactory factory = () -> DriverManager.getConnection(url);
    return create(factory);
}
/**
 * @param url JDBC URL for connections
 * @param properties Properties to pass to DriverManager.getConnection(url, props) for each new handle
 *
 * @return a DBI which uses {@link DriverManager} as a connection factory.
 */
public static Jdbi create(final String url, final Properties properties) {
    Objects.requireNonNull(url, "null url");
    Objects.requireNonNull(properties, "null properties");
    ConnectionFactory factory = () -> DriverManager.getConnection(url, properties);
    return create(factory);
}
/**
 * @param url JDBC URL for connections
 * @param username User name for connection authentication
 * @param password Password for connection authentication
 *
 * @return a DBI which uses {@link DriverManager} as a connection factory.
 */
public static Jdbi create(final String url, final String username, final String password) {
    Objects.requireNonNull(url, "null url");
    Objects.requireNonNull(username, "null username");
    Objects.requireNonNull(password, "null password");
    ConnectionFactory factory = () -> DriverManager.getConnection(url, username, password);
    return create(factory);
}
/**
 * Convenience method used to obtain a handle from a specific data source.
 *
 * @param dataSource the JDBC data source.
 *
 * @return Handle using a Connection obtained from the provided DataSource
 */
public static Handle open(DataSource dataSource) {
    Jdbi jdbi = create(dataSource);
    return jdbi.open();
}
/**
 * Create a Handle wrapping a particular JDBC Connection.
 *
 * @param connection the JDBC connection
 *
 * @return Handle bound to connection
 */
public static Handle open(final Connection connection) {
    Objects.requireNonNull(connection, "null connection");
    // A factory that always hands back the caller-supplied connection.
    ConnectionFactory factory = () -> connection;
    return create(factory).open();
}
/**
 * Obtain a handle with just a JDBC URL.
 *
 * @param url JDBC Url
 *
 * @return newly opened Handle
 */
public static Handle open(final String url) {
    Jdbi jdbi = create(url);
    return jdbi.open();
}
/**
 * Obtain a handle with just a JDBC URL.
 *
 * @param url JDBC Url
 * @param username JDBC username for authentication
 * @param password JDBC password for authentication
 *
 * @return newly opened Handle
 */
public static Handle open(final String url, final String username, final String password) {
    Jdbi jdbi = create(url, username, password);
    return jdbi.open();
}
/**
 * Obtain a handle with just a JDBC URL.
 *
 * @param url JDBC Url
 * @param props JDBC properties
 *
 * @return newly opened Handle
 */
public static Handle open(final String url, final Properties props) {
    Jdbi jdbi = create(url, props);
    return jdbi.open();
}
/**
 * Installs every {@link JdbiPlugin} discoverable via {@link ServiceLoader}.
 *
 * @return this
 */
public Jdbi installPlugins() {
    for (JdbiPlugin discovered : ServiceLoader.load(JdbiPlugin.class)) {
        installPlugin(discovered);
    }
    LOG.debug("Automatically installed plugins {}", plugins);
    return this;
}
/**
 * Installs the given plugin: lets it customize this Jdbi, then registers it so it
 * is applied to every subsequently opened connection and handle.
 *
 * @param plugin the plugin to install
 * @return this
 */
public Jdbi installPlugin(JdbiPlugin plugin)
{
    // Validate eagerly with a descriptive message (consistent with the other
    // null checks in this class) rather than NPE-ing inside customizeJdbi.
    Objects.requireNonNull(plugin, "null plugin");
    plugin.customizeJdbi(this);
    plugins.add(plugin);
    return this;
}
/**
 * Allows customization of how prepared statements are created. When a Handle is created
 * against this DBI instance the factory will be used to create a StatementBuilder for
 * that specific handle. When the handle is closed, the StatementBuilder's close method
 * will be invoked.
 *
 * @param factory the new statement builder factory.
 * @return this
 */
public Jdbi setStatementBuilderFactory(StatementBuilderFactory factory)
{
    // Reject null here (consistent with setTransactionHandler) instead of
    // failing with an NPE much later, inside open().
    Objects.requireNonNull(factory, "null statement builder factory");
    this.statementBuilderFactory.set(factory);
    return this;
}
/**
 * @return the factory currently used to create StatementBuilders for new handles.
 */
public StatementBuilderFactory getStatementBuilderFactory() {
    return statementBuilderFactory.get();
}
/**
 * @return the "global" configuration registry backing this Jdbi instance.
 */
@Override
public ConfigRegistry getConfig()
{
    return this.config;
}
/**
 * Specify the TransactionHandler instance to use. This allows overriding
 * transaction semantics, or mapping into different transaction
 * management systems.
 * <p>
 * The default version uses local transactions on the database Connection
 * instances obtained.
 * </p>
 *
 * @param handler The TransactionHandler to use for all Handle instances obtained
 *                from this DBI
 * @return this
 */
public Jdbi setTransactionHandler(TransactionHandler handler) {
    Objects.requireNonNull(handler, "null transaction handler");
    transactionhandler.set(handler);
    return this;
}
/**
 * @return the TransactionHandler currently applied to new handles.
 */
public TransactionHandler getTransactionHandler() {
    return transactionhandler.get();
}
/**
 * Obtain a Handle to the data source wrapped by this DBI instance.
 *
 * <p>Opens a physical connection, lets every installed plugin customize first the
 * connection and then the handle, and returns the resulting Handle.</p>
 *
 * @return an open Handle instance
 * @throws ConnectionException if a connection cannot be obtained
 */
public Handle open()
{
    try {
        final long start = System.nanoTime();
        Connection conn = connectionFactory.openConnection();
        final long stop = System.nanoTime();
        try {
            for (JdbiPlugin p : plugins) {
                conn = p.customizeConnection(conn);
            }
            StatementBuilder cache = statementBuilderFactory.get().createStatementBuilder(conn);
            Handle h = new Handle(config.createCopy(), transactionhandler.get(), cache, conn);
            for (JdbiPlugin p : plugins) {
                h = p.customizeHandle(h);
            }
            LOG.trace("DBI [{}] obtain handle [{}] in {}ms", this, h, (stop - start) / 1000000L);
            return h;
        }
        catch (Throwable t) {
            // Don't leak the physical connection when plugin customization or
            // statement-builder creation fails after the connection was opened.
            try {
                conn.close();
            }
            catch (SQLException suppressed) {
                t.addSuppressed(suppressed);
            }
            throw t;
        }
    }
    catch (SQLException e) {
        throw new ConnectionException(e);
    }
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients.
 *
 * @param callback A callback which will receive an open Handle
 * @param <R> type returned by the callback
 * @param <X> exception type thrown by the callback, if any.
 *
 * @return the value returned by callback
 *
 * @throws X any exception thrown by the callback
 */
public <R, X extends Exception> R withHandle(HandleCallback<R, X> callback) throws X {
    // try-with-resources guarantees the handle is closed even if the callback throws.
    try (Handle handle = open()) {
        return callback.withHandle(handle);
    }
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients.
 *
 * @param callback A callback which will receive an open Handle
 * @param <X> exception type thrown by the callback, if any.
 *
 * @throws X any exception thrown by the callback
 */
public <X extends Exception> void useHandle(final HandleConsumer<X> callback) throws X {
    // Delegate to withHandle, adapting the void consumer to a null-returning callback.
    withHandle(handle -> {
        callback.useHandle(handle);
        return null;
    });
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients. The handle will be in a transaction when the callback is invoked, and
 * that transaction will be committed if the callback finishes normally, or rolled back if the
 * callback raises an exception.
 *
 * @param callback A callback which will receive an open Handle, in a transaction
 * @param <R> type returned by the callback
 * @param <X> exception type thrown by the callback, if any.
 *
 * @return the value returned by callback
 *
 * @throws X any exception thrown by the callback
 */
public <R, X extends Exception> R inTransaction(final HandleCallback<R, X> callback) throws X {
    return withHandle(h -> h.<R, X>inTransaction(callback));
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients. The handle will be in a transaction when the callback is invoked, and
 * that transaction will be committed if the callback finishes normally, or rolled back if the
 * callback raises an exception.
 *
 * @param callback A callback which will receive an open Handle, in a transaction
 * @param <X> exception type thrown by the callback, if any.
 *
 * @throws X any exception thrown by the callback
 */
public <X extends Exception> void useTransaction(final HandleConsumer<X> callback) throws X {
    useHandle(h -> h.useTransaction(callback));
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients. The handle will be in a transaction when the callback is invoked, and
 * that transaction will be committed if the callback finishes normally, or rolled back if the
 * callback raises an exception.
 *
 * <p>
 * This form accepts a transaction isolation level which will be applied to the connection
 * for the scope of this transaction, after which the original isolation level will be restored.
 * </p>
 *
 * @param level the transaction isolation level which will be applied to the connection for the scope of this
 *              transaction, after which the original isolation level will be restored.
 * @param callback A callback which will receive an open Handle, in a transaction
 * @param <R> type returned by the callback
 * @param <X> exception type thrown by the callback, if any.
 *
 * @return the value returned by callback
 *
 * @throws X any exception thrown by the callback
 */
public <R, X extends Exception> R inTransaction(final TransactionIsolationLevel level, final HandleCallback<R, X> callback) throws X {
    return withHandle(h -> h.<R, X>inTransaction(level, callback));
}
/**
 * A convenience function which manages the lifecycle of a handle and yields it to a callback
 * for use by clients. The handle will be in a transaction when the callback is invoked, and
 * that transaction will be committed if the callback finishes normally, or rolled back if the
 * callback raises an exception.
 *
 * <p>
 * This form accepts a transaction isolation level which will be applied to the connection
 * for the scope of this transaction, after which the original isolation level will be restored.
 * </p>
 *
 * @param level the transaction isolation level which will be applied to the connection for the scope of this
 *              transaction, after which the original isolation level will be restored.
 * @param callback A callback which will receive an open Handle, in a transaction
 * @param <X> exception type thrown by the callback, if any.
 *
 * @throws X any exception thrown by the callback
 */
public <X extends Exception> void useTransaction(final TransactionIsolationLevel level, final HandleConsumer<X> callback) throws X {
    useHandle(h -> h.useTransaction(level, callback));
}
/**
* A convenience method which opens an extension of the given type, yields it to a callback, and returns the result
* of the callback. A handle is opened if needed by the extension, and closed before returning to the caller.
*
* @param extensionType the type of extension.
* @param callback a callback which will receive the extension.
* @param <R> the return type
* @param <E> the extension type
* @param <X> the exception type optionally thrown by the callback
* @return the value returned by the callback.
* @throws NoSuchExtensionException if no {@link ExtensionFactory} is registered which supports the given extension
* type.
* @throws X if thrown by the callback.
*/
public <R, E, X extends Exception> R withExtension(Class<E> extensionType, ExtensionCallback<R, E, X> callback)
throws NoSuchExtensionException, X
{
try (LazyHandleSupplier handle = new LazyHandleSupplier(this, config)) {
E extension = getConfig(Extensions.class)
.findFor(extensionType, handle)
.orElseThrow(() -> new NoSuchExtensionException("Extension not found: " + extensionType));
return callback.withExtension(extension);
}
}
/**
* A convenience method which opens an extension of the given type, and yields it to a callback. A handle is opened
* if needed by the extention, and closed before returning to the caller.
*
* @param extensionType the type of extension
* @param callback a callback which will receive the extension
* @param <E> the extension type
* @param <X> the exception type optionally thrown by the callback
* @throws NoSuchExtensionException if no {@link ExtensionFactory} is registered which supports the given extension type.
* @throws X if thrown by the callback.
*/
public <E, X extends Exception> void useExtension(Class<E> extensionType, ExtensionConsumer<E, X> callback)
throws NoSuchExtensionException, X {
withExtension(extensionType, extension -> {
callback.useExtension(extension);
return null;
});
}
    /**
     * Returns an "on-demand" extension: a proxy which opens and closes handles (as needed)
     * for individual method calls. Only public interface types may be used as on-demand
     * extensions.
     *
     * @param extensionType the type of extension. Must be a public interface type.
     * @param <E> the extension type
     *
     * @return an extension which opens and closes handles (as needed) for individual method calls. Only public
     * interface types may be used as on-demand extensions.
     * @throws NoSuchExtensionException if no extension factory is registered which supports the given type
     * @throws IllegalArgumentException if the given type is not a public interface
     */
    public <E> E onDemand(Class<E> extensionType) throws NoSuchExtensionException {
        // Proxying requires an interface; concrete classes cannot be wrapped this way.
        if (!extensionType.isInterface()) {
            throw new IllegalArgumentException("On-demand extensions are only supported for interfaces.");
        }
        if (!Modifier.isPublic(extensionType.getModifiers())) {
            throw new IllegalArgumentException("On-demand extensions types must be public.");
        }
        // Fail fast here rather than on the first proxied method call.
        if (!getConfig(Extensions.class).hasExtensionFor(extensionType)) {
            throw new NoSuchExtensionException("Extension not found: " + extensionType);
        }
        return OnDemandExtensions.create(this, extensionType);
    }
}
| |
/*
* (c) 2017 Michael A. Beck, Sebastian Henningsen
* disco | Distributed Computer Systems Lab
* University of Kaiserslautern, Germany
* All Rights Reserved.
*
* This software is work in progress and is released in the hope that it will
* be useful to the scientific community. It is provided "as is" without
* express or implied warranty, including but not limited to the correctness
* of the code or its suitability for any particular purpose.
*
* This software is provided under the MIT License, however, we would
* appreciate it if you contacted the respective authors prior to commercial use.
*
* If you find our software useful, we would appreciate if you mentioned it
* in any publication arising from the use of this software or acknowledge
 * our work otherwise. We would also like to hear of any fixes or useful
 * extensions to this software.
*/
package unikl.disco.misc;
import org.apache.commons.math3.fraction.Fraction;
import java.lang.Number;
//TODO What to return in the default case?
//TODO What to return if the type check fails?
//TODO Throw proper Exceptions such as FractionConversionException
public class NumberObj {
    //FIXME Implement a global flag in Analysis.java (?) and read that out
    // private static NumberObjType type = NumberObjType.DOUBLE;
    // Global switch selecting the backing representation for ALL instances created
    // by this JVM: plain doubles or commons-math Fractions.
    private static NumberObjType type = NumberObjType.RATIONAL;
    // When true, the static arithmetic/comparison helpers verify that both operands
    // were created with the same backing type (see TypeCheck).
    private static boolean performTypeChecks = false;

    // Either a Double or a Fraction, depending on the static type flag above.
    private Number value;

    // Sentinel instances. In RATIONAL mode there is no true infinity, so the Integer
    // extremes stand in for +/- infinity (see getPosInfinity / getNegInfinity).
    public static final NumberObj POSITIVE_INFINITY = getPosInfinity();
    public static final NumberObj NEGATIVE_INFINITY = getNegInfinity();
    public static final NumberObj NaN = getNaN();

    /** Creates a number representing zero in the globally configured representation. */
    public NumberObj() {
        switch ( type ) {
            case DOUBLE:
                this.value = new Double( 0.0 );
                break;
            case RATIONAL:
                this.value = new Fraction( 0.0 );
                break;
            default:
                // NOTE(review): value stays null here; unreachable while the enum
                // only has DOUBLE and RATIONAL.
        }
    }

    /**
     * Creates a number from a double value in the globally configured representation.
     *
     * @param value the numeric value to wrap
     */
    public NumberObj( double value ) {
        switch ( type ) {
            case DOUBLE:
                this.value = new Double( value );
                break;
            case RATIONAL:
                // May throw FractionConversionException for values a Fraction cannot
                // approximate (e.g. NaN or infinities).
                this.value = new Fraction( value );
                break;
            default:
                // NOTE(review): terminating the JVM is drastic, but kept for
                // behavioral compatibility; unreachable in practice.
                System.exit( 0 );
        }
    }

    // Internal: wraps an already-built backing Number without conversion.
    private NumberObj( Number value ) {
        this.value = value;
    }

    /**
     * Wraps an existing Double object without conversion.
     *
     * <p>NOTE(review): this bypasses the global type flag — in RATIONAL mode it still
     * produces a Double-backed instance. Verify whether callers rely on that.</p>
     *
     * @param doubleObj the Double to wrap
     * @return a NumberObj backed directly by the given Double
     */
    public static NumberObj DoubleToNumberObj( Double doubleObj ) {
        return new NumberObj( doubleObj );
    }

    // Builds the NaN sentinel for the configured representation.
    private static NumberObj getNaN() {
        switch ( type ) {
            case DOUBLE:
                return new NumberObj( Double.NaN );
            case RATIONAL:
                // NOTE(review): Fraction has no NaN concept; converting Double.NaN is
                // dubious — confirm against the commons-math Fraction(double) contract.
                return new NumberObj( new Fraction( Double.NaN ) );
            default:
                return null;
        }
    }

    // Builds the +infinity sentinel for the configured representation.
    private static NumberObj getPosInfinity() {
        switch ( type ) {
            case DOUBLE:
                return new NumberObj( Double.POSITIVE_INFINITY );
            case RATIONAL:
                // Fraction is based on Integer and thus there's no infinity
                // (and it is prone to overflows); use the integer maximum instead.
                return new NumberObj( new Fraction( Integer.MAX_VALUE ) );
            default:
                return null;
        }
    }

    // Builds the -infinity sentinel for the configured representation.
    private static NumberObj getNegInfinity() {
        switch ( type ) {
            case DOUBLE:
                return new NumberObj( Double.NEGATIVE_INFINITY );
            case RATIONAL:
                // Fraction is based on Integer and thus there's no infinity
                // (and it is prone to overflows); use the integer minimum instead.
                return new NumberObj( new Fraction( Integer.MIN_VALUE ) );
            default:
                return null;
        }
    }

    /**
     * Returns a small positive epsilon (1e-6) in the configured representation.
     *
     * @return the epsilon value, or null for an unknown representation
     */
    public static NumberObj getEpsilon() {
        switch ( type ) {
            case DOUBLE:
                return new NumberObj( new Double( 1e-6 ) );
            case RATIONAL:
                // unfortunately you cannot give the constructor the double value 0.0000001
                return new NumberObj( new Fraction( 1, 1000000 ) );
            default:
                return null;
        }
    }

    /**
     * Returns the global backing representation (shared by all instances).
     *
     * @return the active NumberObjType
     */
    public NumberObjType getType() {
        return type;
    }

    // Reports (via a printed stack trace) when two operands use different backing
    // types. Always returns true: the mismatch is logged but not rejected.
    private static boolean TypeCheck( NumberObj num1, NumberObj num2 ) {
        if ( num1.getType() != num2.getType() ) {
            try {
                throw new Exception();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        return true;
    }

    /**
     * Returns the backing Number (a Double or a Fraction, both immutable).
     *
     * @return the raw backing value
     */
    public Number getValue() {
        return value;
    }

    // In order to simplify the transition from the primitive data type double to
    // a Double object wrapped around it or a rational number object,
    // the following static functions emulate copy-by-value semantics for objects
    // that would normally be passed by reference in Java.

    /**
     * Returns num1 + num2 as a new object. NaN and the infinity sentinels are
     * propagated before any arithmetic to prevent overflow in integer-based
     * representations such as Fraction.
     *
     * @param num1 first summand
     * @param num2 second summand
     * @return the sum, or null for an unknown representation
     */
    public static NumberObj add( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        if ( num1.equals( NaN ) || num2.equals( NaN ) ) {
            return NaN;
        }
        // Prevent overflow exception when adding integer-based representations.
        if ( num1.equals( POSITIVE_INFINITY ) || num2.equals( POSITIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        if ( num1.equals( NEGATIVE_INFINITY ) || num2.equals( NEGATIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( double1.doubleValue() + double2.doubleValue() );
            case RATIONAL:
                Fraction frac1 = (Fraction) num1.getValue();
                Fraction frac2 = (Fraction) num2.getValue();
                // May throw MathArithmeticException due to integer overflow.
                return new NumberObj( frac1.add( frac2 ) );
            default:
                return null;
        }
    }

    /**
     * Returns num1 - num2 as a new object, with the same sentinel propagation as
     * {@link #add(NumberObj, NumberObj)}.
     *
     * @param num1 minuend
     * @param num2 subtrahend
     * @return the difference, or null for an unknown representation
     */
    public static NumberObj sub( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        if ( num1.equals( NaN ) || num2.equals( NaN ) ) {
            return NaN;
        }
        // Prevent overflow exception in integer-based representations.
        // NOTE(review): returns +infinity even when only num2 is +infinity
        // (mathematically that difference would be -infinity) — kept as-is.
        if ( num1.equals( POSITIVE_INFINITY ) || num2.equals( POSITIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        if ( num1.equals( NEGATIVE_INFINITY ) || num2.equals( NEGATIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( double1.doubleValue() - double2.doubleValue() );
            case RATIONAL:
                Fraction frac1 = (Fraction) num1.getValue();
                Fraction frac2 = (Fraction) num2.getValue();
                // May throw MathArithmeticException due to integer overflow.
                return new NumberObj( frac1.subtract( frac2 ) );
            default:
                return null;
        }
    }

    /**
     * Returns num1 * num2 as a new object, with the same sentinel propagation as
     * {@link #add(NumberObj, NumberObj)}.
     *
     * @param num1 first factor
     * @param num2 second factor
     * @return the product, or null for an unknown representation
     */
    public static NumberObj mult( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        if ( num1.equals( NaN ) || num2.equals( NaN ) ) {
            return NaN;
        }
        // Prevent overflow exception in integer-based representations.
        if ( num1.equals( POSITIVE_INFINITY ) || num2.equals( POSITIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        if ( num1.equals( NEGATIVE_INFINITY ) || num2.equals( NEGATIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( double1.doubleValue() * double2.doubleValue() );
            case RATIONAL:
                Fraction frac1 = (Fraction) num1.getValue();
                Fraction frac2 = (Fraction) num2.getValue();
                // May throw MathArithmeticException due to integer overflow.
                return new NumberObj( frac1.multiply( frac2 ) );
            default:
                return null;
        }
    }

    /**
     * Returns num1 / num2 as a new object. Integer-based representations use
     * Integer.MAX_VALUE / MIN_VALUE to signal infinity, so the sentinels need
     * special treatment before dividing.
     *
     * @param num1 dividend
     * @param num2 divisor
     * @return the quotient, or null for an unknown representation
     */
    public static NumberObj div( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        if ( num1.equals( NaN ) || num2.equals( NaN ) ) {
            return NaN;
        }
        if ( num1.equals( POSITIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        if ( num2.equals( POSITIVE_INFINITY ) ) {
            return new NumberObj( 0.0 );
        }
        if ( num1.equals( NEGATIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        if ( num2.equals( NEGATIVE_INFINITY ) ) {
            return new NumberObj( 0.0 );
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( double1.doubleValue() / double2.doubleValue() );
            case RATIONAL:
                Fraction frac2 = (Fraction) num2.getValue();
                if ( frac2.getNumerator() == 0 ) {
                    // NOTE(review): division by zero always yields +infinity here,
                    // even for a negative dividend — kept for compatibility.
                    return getPosInfinity();
                    // return getNaN();
                } else {
                    Fraction frac1 = (Fraction) num1.getValue();
                    return new NumberObj( frac1.divide( frac2 ) );
                }
            default:
                return null;
        }
    }

    /**
     * Returns a copy of this number. Double and Fraction are both immutable, so
     * sharing the backing value object is safe.
     *
     * @return a new NumberObj with the same value
     */
    public NumberObj copy() {
        switch ( type ) {
            case DOUBLE:
                return new NumberObj( value );
            case RATIONAL:
                return new NumberObj( value );
            default:
                return new NumberObj( 0.0 );
        }
    }

    /*
     * java.lang.Math's max(double, double) description:
     *
     * Returns the greater of two double values. That is, the result is the argument closer to positive infinity.
     * If the arguments have the same value, the result is that same value. If either value is NaN, then the result is NaN.
     * Unlike the numerical comparison operators, this method considers negative zero to be strictly smaller than positive zero.
     * If one argument is positive zero and the other negative zero, the result is positive zero.
     *
     * SB's thoughts because there's no max() in Fraction:
     * "result is that same value" implicitly uses copy by value semantics
     * Can a fraction be NaN?
     * The -/+ zero stuff is not needed
     */
    /**
     * Returns the greater of the two numbers as a new object.
     *
     * @param num1 first operand
     * @param num2 second operand
     * @return the maximum, or null for an unknown representation
     */
    public static NumberObj max( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( Math.max( double1.doubleValue(), double2.doubleValue() ) );
            case RATIONAL:
                Fraction frac1 = (Fraction) num1.getValue();
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range; Fraction keeps
                // its denominator positive, so the comparison direction holds.
                double frac1_numerator = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_numerator = (double) frac2.getNumerator() * frac1.getDenominator();
                if ( frac1_numerator >= frac2_numerator ) {
                    return new NumberObj( frac1 );
                } else {
                    return new NumberObj( frac2 );
                }
            default:
                return null;
        }
    }

    /**
     * Returns the smaller of the two numbers as a new object.
     *
     * @param num1 first operand
     * @param num2 second operand
     * @return the minimum, or null for an unknown representation
     */
    public static NumberObj min( NumberObj num1, NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( num1, num2 );
        }
        switch ( num1.getType() ) {
            case DOUBLE:
                Double double1 = (Double) num1.getValue();
                Double double2 = (Double) num2.getValue();
                return new NumberObj( Math.min( double1.doubleValue(), double2.doubleValue() ) );
            case RATIONAL:
                Fraction frac1 = (Fraction) num1.getValue();
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range (see max()).
                double frac1_numerator = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_numerator = (double) frac2.getNumerator() * frac1.getDenominator();
                if ( frac1_numerator < frac2_numerator ) {
                    return new NumberObj( frac1 );
                } else {
                    return new NumberObj( frac2 );
                }
            default:
                return null;
        }
    }

    /**
     * Compares this number against a primitive double, mapping the special values
     * NaN and +/- infinity onto the corresponding sentinel instances.
     *
     * @param num2 the double to compare against
     * @return true if both represent the same numeric value
     */
    public boolean equals( double num2 ) {
        // BUGFIX: "num2 == Double.NaN" is always false (NaN never compares equal
        // to anything); Double.isNaN is required here.
        if ( Double.isNaN( num2 ) ) {
            // BUGFIX: the previous "value.equals( NaN )" compared a Number to a
            // NumberObj and was therefore always false.
            return this.equals( NaN );
        }
        if ( num2 == Double.POSITIVE_INFINITY ) {
            return this.equals( POSITIVE_INFINITY );
        }
        if ( num2 == Double.NEGATIVE_INFINITY ) {
            return this.equals( NEGATIVE_INFINITY );
        }
        return this.equals( new NumberObj( num2 ) );
    }

    /**
     * Value equality against another NumberObj.
     *
     * <p>NOTE(review): this overloads rather than overrides
     * {@link Object#equals(Object)}, so hash-based collections will not use it.
     * Left unchanged to avoid breaking the equals/hashCode contract silently.</p>
     *
     * @param num2 the number to compare against
     * @return true if both represent the same numeric value
     */
    public boolean equals( NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( this, num2 );
        }
        // The sentinels are singletons, so identity comparison suffices here.
        if ( this == NaN && num2 == NaN ) {
            return true;
        }
        if ( this == POSITIVE_INFINITY && num2 == POSITIVE_INFINITY ) {
            return true;
        }
        if ( this == NEGATIVE_INFINITY && num2 == NEGATIVE_INFINITY ) {
            return true;
        }
        switch ( type ) {
            case DOUBLE:
                Double double2 = (Double) num2.getValue();
                return value.doubleValue() == double2.doubleValue();
            case RATIONAL:
                // Fraction's equals() is inherited from java.lang.Object, so
                // compare via cross-multiplication in double to avoid Integer overflow.
                Fraction frac1 = (Fraction) value;
                Fraction frac2 = (Fraction) num2.getValue();
                double frac1_num = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_num = (double) frac2.getNumerator() * frac1.getDenominator();
                return frac1_num == frac2_num;
            default:
                return false;
        }
    }

    /**
     * Strict greater-than comparison. Returns false if either operand is NaN.
     *
     * @param num2 the number to compare against
     * @return true if this > num2
     */
    public boolean greater( NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( this, num2 );
        }
        if ( this == NaN || num2 == NaN ) {
            return false;
        }
        if ( this == POSITIVE_INFINITY && num2 == POSITIVE_INFINITY ) {
            return false;
        }
        if ( this == NEGATIVE_INFINITY && num2 == NEGATIVE_INFINITY ) {
            return false;
        }
        switch ( type ) {
            case DOUBLE:
                Double double2 = (Double) num2.getValue();
                return value.doubleValue() > double2.doubleValue();
            case RATIONAL:
                Fraction frac1 = (Fraction) value;
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range (see max()).
                double frac1_num = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_num = (double) frac2.getNumerator() * frac1.getDenominator();
                return frac1_num > frac2_num;
            default:
                return false;
        }
    }

    /**
     * Greater-than-or-equal comparison. Returns false if either operand is NaN.
     *
     * @param num2 the number to compare against
     * @return true if this >= num2
     */
    public boolean ge( NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( this, num2 );
        }
        if ( this == NaN || num2 == NaN ) {
            return false;
        }
        if ( this == POSITIVE_INFINITY && num2 == POSITIVE_INFINITY ) {
            return true;
        }
        if ( this == NEGATIVE_INFINITY && num2 == NEGATIVE_INFINITY ) {
            return true;
        }
        switch ( type ) {
            case DOUBLE:
                Double double2 = (Double) num2.getValue();
                return value.doubleValue() >= double2.doubleValue();
            case RATIONAL:
                Fraction frac1 = (Fraction) value;
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range (see max()).
                double frac1_num = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_num = (double) frac2.getNumerator() * frac1.getDenominator();
                return frac1_num >= frac2_num;
            default:
                return false;
        }
    }

    /**
     * Strict less-than comparison. Returns false if either operand is NaN.
     *
     * @param num2 the number to compare against
     * @return true if this < num2
     */
    public boolean less( NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( this, num2 );
        }
        if ( this == NaN || num2 == NaN ) {
            return false;
        }
        if ( this == POSITIVE_INFINITY && num2 == POSITIVE_INFINITY ) {
            return false;
        }
        if ( this == NEGATIVE_INFINITY && num2 == NEGATIVE_INFINITY ) {
            return false;
        }
        switch ( type ) {
            case DOUBLE:
                Double double2 = (Double) num2.getValue();
                return value.doubleValue() < double2.doubleValue();
            case RATIONAL:
                Fraction frac1 = (Fraction) value;
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range (see max()).
                double frac1_num = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_num = (double) frac2.getNumerator() * frac1.getDenominator();
                return frac1_num < frac2_num;
            default:
                return false;
        }
    }

    /**
     * Less-than-or-equal comparison. Returns false if either operand is NaN.
     *
     * @param num2 the number to compare against
     * @return true if this <= num2
     */
    public boolean le( NumberObj num2 ) {
        if ( performTypeChecks ) {
            TypeCheck( this, num2 );
        }
        if ( this == NaN || num2 == NaN ) {
            return false;
        }
        if ( this == POSITIVE_INFINITY && num2 == POSITIVE_INFINITY ) {
            return true;
        }
        if ( this == NEGATIVE_INFINITY && num2 == NEGATIVE_INFINITY ) {
            return true;
        }
        switch ( type ) {
            case DOUBLE:
                Double double2 = (Double) num2.getValue();
                return value.doubleValue() <= double2.doubleValue();
            case RATIONAL:
                Fraction frac1 = (Fraction) value;
                Fraction frac2 = (Fraction) num2.getValue();
                // Cross-multiply in double to stay within range (see max()).
                double frac1_num = (double) frac1.getNumerator() * frac2.getDenominator();
                double frac2_num = (double) frac2.getNumerator() * frac1.getDenominator();
                return frac1_num <= frac2_num;
            default:
                return false;
        }
    }

    /**
     * Returns the absolute value of num as a new object; sentinels are preserved
     * (|-infinity| stays -infinity by design of the original code? No: it maps to
     * the matching sentinel unchanged below).
     *
     * @param num the number to take the absolute value of
     * @return |num|
     */
    public static NumberObj abs( NumberObj num ) {
        if ( num.equals( NaN ) ) {
            return NaN;
        }
        if ( num.equals( POSITIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        // NOTE(review): -infinity is returned unchanged rather than negated —
        // kept for behavioral compatibility.
        if ( num.equals( NEGATIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        switch ( num.getType() ) {
            case DOUBLE:
                Double double_value = (Double) num.getValue();
                return new NumberObj( Math.abs( double_value.doubleValue() ) );
            case RATIONAL:
                Fraction frac = (Fraction) num.getValue();
                return new NumberObj( frac.abs() );
            default:
                return new NumberObj( 0.0 );
        }
    }

    /**
     * Returns -num as a new object; the infinity sentinels swap sign and NaN is
     * propagated.
     *
     * @param num the number to negate
     * @return the negated value
     */
    public static NumberObj negate( NumberObj num ) {
        if ( num.equals( NaN ) ) {
            return NaN;
        }
        if ( num.equals( POSITIVE_INFINITY ) ) {
            return NEGATIVE_INFINITY;
        }
        if ( num.equals( NEGATIVE_INFINITY ) ) {
            return POSITIVE_INFINITY;
        }
        switch ( num.getType() ) {
            case DOUBLE:
                Double double_value = (Double) num.getValue();
                return new NumberObj( -(double_value.doubleValue()) );
            case RATIONAL:
                Fraction frac = (Fraction) num.getValue();
                return new NumberObj( frac.negate() );
            default:
                return new NumberObj( 0.0 );
        }
    }

    /** Renders the value, using the conventional spellings for the sentinels. */
    @Override
    public String toString() {
        if ( this.equals( NaN ) ) {
            return "NaN";
        }
        if ( this.equals( POSITIVE_INFINITY ) ) {
            return "Infinity";
        }
        if ( this.equals( NEGATIVE_INFINITY ) ) {
            return "-Infinity";
        }
        switch ( this.getType() ) {
            case DOUBLE:
                return Double.toString( (Double) value );
            case RATIONAL:
                return ( (Fraction) value ).toString();
            default:
                // BUGFIX: corrected the "invalied number" typo in the output.
                return "invalid number";
        }
    }
}
| |
package com.twitter.elephantbird.util;
import com.google.common.collect.ImmutableMap;
import com.twitter.elephantbird.util.Strings;
import org.junit.Test;
import java.util.Map;
import static junit.framework.Assert.assertEquals;
/**
* Tests for Strings.
*
* Largely taken from the defunct rogueweb project at
* <link>https://code.google.com/p/rogueweb/</link>
* Original author Anthony Eden.
*/
public class TestStrings {
    // Singular -> expected plural form; iterated in both directions by the
    // pluralize/singularize tests below.
    private static final Map<String, String> singularToPlural_ = ImmutableMap.<String, String>builder()
        .put("search", "searches")
        .put("switch", "switches")
        .put("fix", "fixes")
        .put("box", "boxes")
        .put("process", "processes")
        .put("address", "addresses")
        .put("case", "cases")
        .put("stack", "stacks")
        .put("wish", "wishes")
        .put("fish", "fish")
        .put("category", "categories")
        .put("query", "queries")
        .put("ability", "abilities")
        .put("agency", "agencies")
        .put("movie", "movies")
        .put("company", "companies")
        .put("archive", "archives")
        .put("index", "indices")
        .put("wife", "wives")
        .put("safe", "saves")
        .put("half", "halves")
        .put("move", "moves")
        .put("salesperson", "salespeople")
        .put("person", "people")
        .put("spokesman", "spokesmen")
        .put("man", "men")
        .put("woman", "women")
        .put("basis", "bases")
        .put("diagnosis", "diagnoses")
        .put("datum", "data")
        .put("medium", "media")
        .put("analysis", "analyses")
        .put("node_child", "node_children")
        .put("child", "children")
        .put("experience", "experiences")
        .put("day", "days")
        .put("comment", "comments")
        .put("foobar", "foobars")
        .put("newsletter", "newsletters")
        .put("old_news", "old_news")
        .put("news", "news")
        .put("series", "series")
        .put("species", "species")
        .put("quiz", "quizzes")
        .put("perspective", "perspectives")
        .put("ox", "oxen")
        .put("photo", "photos")
        .put("buffalo", "buffaloes")
        .put("tomato", "tomatoes")
        .put("dwarf", "dwarves")
        .put("elf", "elves")
        .put("information", "information")
        .put("equipment", "equipment")
        .put("bus", "buses")
        .put("status", "statuses")
        .put("status_code", "status_codes")
        .put("mouse", "mice")
        .put("louse", "lice")
        .put("house", "houses")
        .put("octopus", "octopi")
        .put("virus", "viri")
        .put("alias", "aliases")
        .put("portfolio", "portfolios")
        .put("vertex", "vertices")
        .put("matrix", "matrices")
        .put("axis", "axes")
        .put("testis", "testes")
        .put("crisis", "crises")
        .put("rice", "rice")
        .put("shoe", "shoes")
        .put("horse", "horses")
        .put("prize", "prizes")
        .put("edge", "edges")
        .build();
    // Mixed-case / spaced input -> expected title-case output.
    private static final Map<String, String> mixtureToTitleCase_ = ImmutableMap.<String, String>builder()
        .put("active_record", "Active Record")
        .put("ActiveRecord", "Active Record")
        .put("action web service", "Action Web Service")
        .put("Action Web Service", "Action Web Service")
        .put("Action web service", "Action Web Service")
        .put("actionwebservice", "Actionwebservice")
        .put("Actionwebservice", "Actionwebservice")
        .build();
    // CamelCase -> underscore pairs that round-trip in both directions.
    private static final Map<String, String> camelToUnderscore_ = ImmutableMap.<String, String>builder()
        .put("Product", "product")
        .put("SpecialGuest", "special_guest")
        .put("ApplicationController", "application_controller")
        .put("Area51Controller", "area51_controller")
        .put("InnerClass$Test", "inner_class__test")
        .build();
    // CamelCase -> underscore pairs that do NOT round-trip (acronym handling).
    private static final Map<String, String> camelToUnderscoreWithoutReverse_ = ImmutableMap.<String, String>builder()
        .put("HTMLTidy", "html_tidy")
        .put("HTMLTidyGenerator", "html_tidy_generator")
        .put("FreeBSD", "free_bsd")
        .put("HTML", "html")
        .build();
    private static final Map<String, String> camelWithPackageToUnderscoreWithSlash_ = ImmutableMap.<String, String>builder()
        .put("admin.Product", "admin/product")
        .put("users.commission.Department", "users/commission/department")
        .put("usersSection.CommissionDepartment", "users_section/commission_department")
        .build();
    private static final Map<String, String> classNameToForeignKeyWithUnderscore_ = ImmutableMap.<String, String>builder()
        .put("Person", "person_id")
        .put("application.billing.Account", "account_id")
        .build();
    private static final Map<String, String> classNameToTableName_ = ImmutableMap.<String, String>builder()
        .put("PrimarySpokesman", "primary_spokesmen")
        .put("NodeChild", "node_children")
        .build();
    private static final Map<String, String> underscoreToHuman_ = ImmutableMap.<String, String>builder()
        .put("employee_salary", "Employee salary")
        .put("employee_id", "Employee")
        .put("underground", "Underground")
        .build();
    // Integer -> expected English ordinal suffix form.
    private static final Map<Integer, String> ordinalNumbers_ = ImmutableMap.<Integer, String>builder()
        .put(0, "0th")
        .put(1, "1st")
        .put(2, "2nd")
        .put(3, "3rd")
        .put(4, "4th")
        .put(5, "5th")
        .put(6, "6th")
        .put(7, "7th")
        .put(8, "8th")
        .put(9, "9th")
        .put(10, "10th")
        .put(11, "11th")
        .put(12, "12th")
        .put(13, "13th")
        .put(14, "14th")
        .put(20, "20th")
        .put(21, "21st")
        .put(22, "22nd")
        .put(23, "23rd")
        .put(24, "24th")
        .put(100, "100th")
        .put(101, "101st")
        .put(102, "102nd")
        .put(103, "103rd")
        .put(104, "104th")
        .put(110, "110th")
        .put(1000, "1000th")
        .put(1001, "1001st")
        .put(10013, "10013th")
        .build();
    private static final Map<String, String> underscoresToDashes_ = ImmutableMap.<String, String>builder()
        .put("street", "street")
        .put("street_address", "street-address")
        .put("person_street_address", "person-street-address")
        .build();
    private static final Map<String, String> underscoreToLowerCamel_ = ImmutableMap.<String, String>builder()
        .put("product", "product")
        .put("special_guest", "specialGuest")
        .put("application_controller", "applicationController")
        .put("area51_controller", "area51Controller")
        .build();

    @Test
    public void testPluralizePlurals() {
        // Already-plural words must pass through unchanged.
        assertEquals("plurals", Strings.pluralize("plurals"));
        assertEquals("Plurals", Strings.pluralize("Plurals"));
    }

    @Test
    public void testPluralize() {
        for (Map.Entry<String, String> entry : singularToPlural_.entrySet()) {
            String singular = entry.getKey();
            String plural = entry.getValue();
            assertEquals(plural, Strings.pluralize(singular));
            // Capitalization must be preserved through pluralization.
            assertEquals(Strings.capitalize(plural),
                Strings.pluralize(Strings.capitalize(singular)));
        }
    }

    @Test
    public void testSingularize() {
        for (Map.Entry<String, String> entry : singularToPlural_.entrySet()) {
            String singular = entry.getKey();
            String plural = entry.getValue();
            assertEquals(singular, Strings.singularize(plural));
            assertEquals(Strings.capitalize(singular),
                Strings.singularize(Strings.capitalize(plural)));
        }
    }

    @Test
    public void testTitleize() {
        for (Map.Entry<String, String> entry : mixtureToTitleCase_.entrySet()) {
            assertEquals(entry.getValue(), Strings.titleize(entry.getKey()));
        }
    }

    @Test
    public void testCamelize() {
        for (Map.Entry<String, String> entry : camelToUnderscore_.entrySet()) {
            assertEquals(entry.getKey(), Strings.camelize(entry.getValue()));
        }
    }

    @Test
    public void testUnderscore() {
        for (Map.Entry<String, String> entry : camelToUnderscore_.entrySet()) {
            assertEquals(entry.getValue(), Strings.underscore(entry.getKey()));
        }
        for (Map.Entry<String, String> entry : camelToUnderscoreWithoutReverse_.entrySet()) {
            assertEquals(entry.getValue(), Strings.underscore(entry.getKey()));
        }
    }

    @Test
    public void testCamelizeWithPackage() {
        for (Map.Entry<String, String> entry : camelWithPackageToUnderscoreWithSlash_.entrySet()) {
            assertEquals(entry.getKey(), Strings.camelize(entry.getValue()));
        }
    }

    @Test
    public void testUnderscoreWithSlashes() {
        for (Map.Entry<String, String> entry : camelWithPackageToUnderscoreWithSlash_.entrySet()) {
            assertEquals(entry.getValue(), Strings.underscore(entry.getKey()));
        }
    }

    @Test
    public void testDepackage() {
        assertEquals("Account", Strings.depackage("application.billing.Account"));
    }

    @Test
    public void testForeignKey() {
        for (Map.Entry<String, String> entry : classNameToForeignKeyWithUnderscore_.entrySet()) {
            assertEquals(entry.getValue(), Strings.foreignKey(entry.getKey()));
        }
    }

    @Test
    public void testTableize() {
        for (Map.Entry<String, String> entry : classNameToTableName_.entrySet()) {
            assertEquals(entry.getValue(), Strings.tableize(entry.getKey()));
        }
    }

    @Test
    public void testClassify() {
        for (Map.Entry<String, String> entry : classNameToTableName_.entrySet()) {
            assertEquals(entry.getKey(), Strings.classify(entry.getValue()));
        }
    }

    @Test
    public void testHumanize() {
        for (Map.Entry<String, String> entry : underscoreToHuman_.entrySet()) {
            assertEquals(entry.getValue(), Strings.humanize(entry.getKey()));
        }
    }

    @Test
    public void testOrdinal() {
        // FIX: this loop previously asserted the same expectation twice per entry;
        // the redundant duplicate assertion has been removed.
        for (Map.Entry<Integer, String> entry : ordinalNumbers_.entrySet()) {
            assertEquals(entry.getValue(), Strings.ordinalize(entry.getKey()));
        }
    }

    @Test
    public void testDasherize() {
        for (Map.Entry<String, String> entry : underscoresToDashes_.entrySet()) {
            assertEquals(entry.getValue(), Strings.dasherize(entry.getKey()));
        }
    }

    @Test
    public void testUnderscoredAsReverseOfDasherize() {
        for (Map.Entry<String, String> entry : underscoresToDashes_.entrySet()) {
            assertEquals(entry.getKey(), Strings.underscore(Strings.dasherize(entry.getKey())));
        }
    }

    @Test
    public void testUnderscoreToLowerCamel() {
        for (Map.Entry<String, String> entry : underscoreToLowerCamel_.entrySet()) {
            assertEquals(entry.getValue(), Strings.camelize(entry.getKey(), true));
        }
    }

    @Test
    public void testCapitalize() {
        assertEquals("Foo bar baz", Strings.capitalize("foo bar baz"));
    }

    @Test
    public void testClassNameToTableName() {
        assertEquals("companies", Strings.tableize("com.aetrion.activerecord.fixture.Companies"));
    }
}
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.network.v2019_02_01;
import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.arm.resources.models.Resource;
import com.microsoft.azure.arm.resources.models.GroupableResourceCore;
import com.microsoft.azure.arm.resources.models.HasResourceGroup;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2019_02_01.implementation.NetworkManager;
import java.util.List;
import com.microsoft.azure.SubResource;
import com.microsoft.azure.management.network.v2019_02_01.implementation.LocalNetworkGatewayInner;
import com.microsoft.azure.management.network.v2019_02_01.implementation.VirtualNetworkGatewayInner;
import com.microsoft.azure.management.network.v2019_02_01.implementation.VirtualNetworkGatewayConnectionInner;
/**
* Type representing VirtualNetworkGatewayConnection.
*/
public interface VirtualNetworkGatewayConnection extends HasInner<VirtualNetworkGatewayConnectionInner>, Resource, GroupableResourceCore<NetworkManager, VirtualNetworkGatewayConnectionInner>, HasResourceGroup, Refreshable<VirtualNetworkGatewayConnection>, Updatable<VirtualNetworkGatewayConnection.Update>, HasManager<NetworkManager> {
    /**
     * @return the authorizationKey value.
     */
    String authorizationKey();
    /**
     * @return the connectionProtocol value.
     */
    VirtualNetworkGatewayConnectionProtocol connectionProtocol();
    /**
     * @return the connectionStatus value.
     */
    VirtualNetworkGatewayConnectionStatus connectionStatus();
    /**
     * @return the connectionType value.
     */
    VirtualNetworkGatewayConnectionType connectionType();
    /**
     * @return the egressBytesTransferred value.
     */
    Long egressBytesTransferred();
    /**
     * @return the enableBgp value.
     */
    Boolean enableBgp();
    /**
     * @return the etag value.
     */
    String etag();
    /**
     * @return the expressRouteGatewayBypass value.
     */
    Boolean expressRouteGatewayBypass();
    /**
     * @return the ingressBytesTransferred value.
     */
    Long ingressBytesTransferred();
    /**
     * @return the ipsecPolicies value.
     */
    List<IpsecPolicy> ipsecPolicies();
    /**
     * @return the localNetworkGateway2 value.
     */
    LocalNetworkGateway localNetworkGateway2();
    /**
     * @return the peer value.
     */
    SubResource peer();
    /**
     * @return the provisioningState value.
     */
    String provisioningState();
    /**
     * @return the resourceGuid value.
     */
    String resourceGuid();
    /**
     * @return the routingWeight value.
     */
    Integer routingWeight();
    /**
     * @return the sharedKey value.
     */
    String sharedKey();
    /**
     * @return the tunnelConnectionStatus value.
     */
    List<TunnelConnectionHealth> tunnelConnectionStatus();
    /**
     * @return the usePolicyBasedTrafficSelectors value.
     */
    Boolean usePolicyBasedTrafficSelectors();
    /**
     * @return the virtualNetworkGateway1 value.
     */
    VirtualNetworkGateway virtualNetworkGateway1();
    /**
     * @return the virtualNetworkGateway2 value.
     */
    VirtualNetworkGateway virtualNetworkGateway2();
    /**
     * The entirety of the VirtualNetworkGatewayConnection definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithGroup, DefinitionStages.WithConnectionType, DefinitionStages.WithVirtualNetworkGateway1, DefinitionStages.WithCreate {
    }
    /**
     * Grouping of VirtualNetworkGatewayConnection definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a VirtualNetworkGatewayConnection definition.
         */
        interface Blank extends GroupableResourceCore.DefinitionWithRegion<WithGroup> {
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify the resource group.
         */
        interface WithGroup extends GroupableResourceCore.DefinitionStages.WithGroup<WithConnectionType> {
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify ConnectionType.
         */
        interface WithConnectionType {
            /**
             * Specifies connectionType.
             * @param connectionType Gateway connection type. Possible values include: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', 'VPNClient'
             * @return the next definition stage
             */
            WithVirtualNetworkGateway1 withConnectionType(VirtualNetworkGatewayConnectionType connectionType);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify VirtualNetworkGateway1.
         */
        interface WithVirtualNetworkGateway1 {
            /**
             * Specifies virtualNetworkGateway1.
             * @param virtualNetworkGateway1 The reference to virtual network gateway resource
             * @return the next definition stage
             */
            WithCreate withVirtualNetworkGateway1(VirtualNetworkGatewayInner virtualNetworkGateway1);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify AuthorizationKey.
         */
        interface WithAuthorizationKey {
            /**
             * Specifies authorizationKey.
             * @param authorizationKey The authorizationKey
             * @return the next definition stage
             */
            WithCreate withAuthorizationKey(String authorizationKey);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify ConnectionProtocol.
         */
        interface WithConnectionProtocol {
            /**
             * Specifies connectionProtocol.
             * @param connectionProtocol Connection protocol used for this connection. Possible values include: 'IKEv2', 'IKEv1'
             * @return the next definition stage
             */
            WithCreate withConnectionProtocol(VirtualNetworkGatewayConnectionProtocol connectionProtocol);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify EnableBgp.
         */
        interface WithEnableBgp {
            /**
             * Specifies enableBgp.
             * @param enableBgp EnableBgp flag
             * @return the next definition stage
             */
            WithCreate withEnableBgp(Boolean enableBgp);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next definition stage
             */
            WithCreate withEtag(String etag);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify ExpressRouteGatewayBypass.
         */
        interface WithExpressRouteGatewayBypass {
            /**
             * Specifies expressRouteGatewayBypass.
             * @param expressRouteGatewayBypass Bypass ExpressRoute Gateway for data forwarding
             * @return the next definition stage
             */
            WithCreate withExpressRouteGatewayBypass(Boolean expressRouteGatewayBypass);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify IpsecPolicies.
         */
        interface WithIpsecPolicies {
            /**
             * Specifies ipsecPolicies.
             * @param ipsecPolicies The IPSec Policies to be considered by this connection
             * @return the next definition stage
             */
            WithCreate withIpsecPolicies(List<IpsecPolicy> ipsecPolicies);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify LocalNetworkGateway2.
         */
        interface WithLocalNetworkGateway2 {
            /**
             * Specifies localNetworkGateway2.
             * @param localNetworkGateway2 The reference to local network gateway resource
             * @return the next definition stage
             */
            WithCreate withLocalNetworkGateway2(LocalNetworkGatewayInner localNetworkGateway2);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify Peer.
         */
        interface WithPeer {
            /**
             * Specifies peer.
             * @param peer The reference to peerings resource
             * @return the next definition stage
             */
            WithCreate withPeer(SubResource peer);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify ResourceGuid.
         */
        interface WithResourceGuid {
            /**
             * Specifies resourceGuid.
             * @param resourceGuid The resource GUID property of the VirtualNetworkGatewayConnection resource
             * @return the next definition stage
             */
            WithCreate withResourceGuid(String resourceGuid);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify RoutingWeight.
         */
        interface WithRoutingWeight {
            /**
             * Specifies routingWeight.
             * @param routingWeight The routing weight
             * @return the next definition stage
             */
            WithCreate withRoutingWeight(Integer routingWeight);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify SharedKey.
         */
        interface WithSharedKey {
            /**
             * Specifies sharedKey.
             * @param sharedKey The IPSec shared key
             * @return the next definition stage
             */
            WithCreate withSharedKey(String sharedKey);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify UsePolicyBasedTrafficSelectors.
         */
        interface WithUsePolicyBasedTrafficSelectors {
            /**
             * Specifies usePolicyBasedTrafficSelectors.
             * @param usePolicyBasedTrafficSelectors Enable policy-based traffic selectors
             * @return the next definition stage
             */
            WithCreate withUsePolicyBasedTrafficSelectors(Boolean usePolicyBasedTrafficSelectors);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection definition allowing to specify VirtualNetworkGateway2.
         */
        interface WithVirtualNetworkGateway2 {
            /**
             * Specifies virtualNetworkGateway2.
             * @param virtualNetworkGateway2 The reference to virtual network gateway resource
             * @return the next definition stage
             */
            WithCreate withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2);
        }
        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<VirtualNetworkGatewayConnection>, Resource.DefinitionWithTags<WithCreate>, DefinitionStages.WithAuthorizationKey, DefinitionStages.WithConnectionProtocol, DefinitionStages.WithEnableBgp, DefinitionStages.WithEtag, DefinitionStages.WithExpressRouteGatewayBypass, DefinitionStages.WithIpsecPolicies, DefinitionStages.WithLocalNetworkGateway2, DefinitionStages.WithPeer, DefinitionStages.WithResourceGuid, DefinitionStages.WithRoutingWeight, DefinitionStages.WithSharedKey, DefinitionStages.WithUsePolicyBasedTrafficSelectors, DefinitionStages.WithVirtualNetworkGateway2 {
        }
    }
    /**
     * The template for a VirtualNetworkGatewayConnection update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<VirtualNetworkGatewayConnection>, Resource.UpdateWithTags<Update>, UpdateStages.WithAuthorizationKey, UpdateStages.WithConnectionProtocol, UpdateStages.WithEnableBgp, UpdateStages.WithEtag, UpdateStages.WithExpressRouteGatewayBypass, UpdateStages.WithIpsecPolicies, UpdateStages.WithLocalNetworkGateway2, UpdateStages.WithPeer, UpdateStages.WithResourceGuid, UpdateStages.WithRoutingWeight, UpdateStages.WithSharedKey, UpdateStages.WithUsePolicyBasedTrafficSelectors, UpdateStages.WithVirtualNetworkGateway2 {
    }
    /**
     * Grouping of VirtualNetworkGatewayConnection update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify AuthorizationKey.
         */
        interface WithAuthorizationKey {
            /**
             * Specifies authorizationKey.
             * @param authorizationKey The authorizationKey
             * @return the next update stage
             */
            Update withAuthorizationKey(String authorizationKey);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify ConnectionProtocol.
         */
        interface WithConnectionProtocol {
            /**
             * Specifies connectionProtocol.
             * @param connectionProtocol Connection protocol used for this connection. Possible values include: 'IKEv2', 'IKEv1'
             * @return the next update stage
             */
            Update withConnectionProtocol(VirtualNetworkGatewayConnectionProtocol connectionProtocol);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify EnableBgp.
         */
        interface WithEnableBgp {
            /**
             * Specifies enableBgp.
             * @param enableBgp EnableBgp flag
             * @return the next update stage
             */
            Update withEnableBgp(Boolean enableBgp);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next update stage
             */
            Update withEtag(String etag);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify ExpressRouteGatewayBypass.
         */
        interface WithExpressRouteGatewayBypass {
            /**
             * Specifies expressRouteGatewayBypass.
             * @param expressRouteGatewayBypass Bypass ExpressRoute Gateway for data forwarding
             * @return the next update stage
             */
            Update withExpressRouteGatewayBypass(Boolean expressRouteGatewayBypass);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify IpsecPolicies.
         */
        interface WithIpsecPolicies {
            /**
             * Specifies ipsecPolicies.
             * @param ipsecPolicies The IPSec Policies to be considered by this connection
             * @return the next update stage
             */
            Update withIpsecPolicies(List<IpsecPolicy> ipsecPolicies);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify LocalNetworkGateway2.
         */
        interface WithLocalNetworkGateway2 {
            /**
             * Specifies localNetworkGateway2.
             * @param localNetworkGateway2 The reference to local network gateway resource
             * @return the next update stage
             */
            Update withLocalNetworkGateway2(LocalNetworkGatewayInner localNetworkGateway2);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify Peer.
         */
        interface WithPeer {
            /**
             * Specifies peer.
             * @param peer The reference to peerings resource
             * @return the next update stage
             */
            Update withPeer(SubResource peer);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify ResourceGuid.
         */
        interface WithResourceGuid {
            /**
             * Specifies resourceGuid.
             * @param resourceGuid The resource GUID property of the VirtualNetworkGatewayConnection resource
             * @return the next update stage
             */
            Update withResourceGuid(String resourceGuid);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify RoutingWeight.
         */
        interface WithRoutingWeight {
            /**
             * Specifies routingWeight.
             * @param routingWeight The routing weight
             * @return the next update stage
             */
            Update withRoutingWeight(Integer routingWeight);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify SharedKey.
         */
        interface WithSharedKey {
            /**
             * Specifies sharedKey.
             * @param sharedKey The IPSec shared key
             * @return the next update stage
             */
            Update withSharedKey(String sharedKey);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify UsePolicyBasedTrafficSelectors.
         */
        interface WithUsePolicyBasedTrafficSelectors {
            /**
             * Specifies usePolicyBasedTrafficSelectors.
             * @param usePolicyBasedTrafficSelectors Enable policy-based traffic selectors
             * @return the next update stage
             */
            Update withUsePolicyBasedTrafficSelectors(Boolean usePolicyBasedTrafficSelectors);
        }
        /**
         * The stage of the VirtualNetworkGatewayConnection update allowing to specify VirtualNetworkGateway2.
         */
        interface WithVirtualNetworkGateway2 {
            /**
             * Specifies virtualNetworkGateway2.
             * @param virtualNetworkGateway2 The reference to virtual network gateway resource
             * @return the next update stage
             */
            Update withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2);
        }
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License, version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package io.netty.handler.codec.http2;
import static io.netty.handler.codec.http2.Http2CodecUtil.CONNECTION_STREAM_ID;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_PRIORITY_WEIGHT;
import static io.netty.handler.codec.http2.Http2CodecUtil.DEFAULT_WINDOW_SIZE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
import io.netty.buffer.ByteBuf;
import io.netty.channel.Channel;
import io.netty.channel.ChannelConfig;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelPromise;
import io.netty.handler.codec.http2.Http2FrameWriter.Configuration;
import io.netty.util.concurrent.EventExecutor;
import junit.framework.AssertionFailedError;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import java.util.concurrent.atomic.AtomicInteger;
/**
* Tests for {@link DefaultHttp2RemoteFlowController}.
*/
public class DefaultHttp2RemoteFlowControllerTest {
    // Odd, client-initiated stream ids used throughout the tests. Streams C and D
    // are made dependents of stream A in initConnectionAndController().
    private static final int STREAM_A = 1;
    private static final int STREAM_B = 3;
    private static final int STREAM_C = 5;
    private static final int STREAM_D = 7;
    // Controller under test; rebuilt for each test in setup().
    private DefaultHttp2RemoteFlowController controller;
    @Mock
    private ByteBuf buffer;
    @Mock
    private Http2FrameSizePolicy frameWriterSizePolicy;
    @Mock
    private Configuration frameWriterConfiguration;
    @Mock
    private ChannelHandlerContext ctx;
    @Mock
    private Channel channel;
    @Mock
    private ChannelConfig config;
    @Mock
    private EventExecutor executor;
    @Mock
    private ChannelPromise promise;
    @Mock
    private Http2RemoteFlowController.Listener listener;
    // Client-mode connection that the controller is attached to.
    private DefaultHttp2Connection connection;
    @Before
    public void setup() throws Http2Exception {
        MockitoAnnotations.initMocks(this);
        when(ctx.newPromise()).thenReturn(promise);
        // The controller must never flush on its own: any flush() on the mocked
        // context immediately fails the test.
        when(ctx.flush()).thenThrow(new AssertionFailedError("forbidden"));
        setChannelWritability(true);
        when(channel.config()).thenReturn(config);
        when(executor.inEventLoop()).thenReturn(true);
        initConnectionAndController();
        resetCtx();
        // This is intentionally left out of initConnectionAndController so it can be tested below.
        controller.channelHandlerContext(ctx);
        // Attaching the context reports each stream writable exactly once; clear the
        // listener so individual tests start from a clean slate.
        assertWritabilityChanged(1, true);
        reset(listener);
    }
private void initConnectionAndController() throws Http2Exception {
connection = new DefaultHttp2Connection(false);
controller = new DefaultHttp2RemoteFlowController(connection, listener);
connection.remote().flowController(controller);
connection.local().createStream(STREAM_A, false);
connection.local().createStream(STREAM_B, false);
Http2Stream streamC = connection.local().createStream(STREAM_C, false);
Http2Stream streamD = connection.local().createStream(STREAM_D, false);
streamC.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
streamD.setPriority(STREAM_A, DEFAULT_PRIORITY_WEIGHT, false);
}
    @Test
    public void initialWindowSizeShouldOnlyChangeStreams() throws Http2Exception {
        // Shrinking the initial window size must resize every stream window but must
        // leave the connection-level window untouched.
        controller.initialWindowSize(0);
        assertEquals(DEFAULT_WINDOW_SIZE, window(CONNECTION_STREAM_ID));
        assertEquals(0, window(STREAM_A));
        assertEquals(0, window(STREAM_B));
        assertEquals(0, window(STREAM_C));
        assertEquals(0, window(STREAM_D));
        // Every stream transitions to non-writable exactly once.
        assertWritabilityChanged(1, false);
    }
    @Test
    public void windowUpdateShouldChangeConnectionWindow() throws Http2Exception {
        // A WINDOW_UPDATE on stream 0 grows only the connection window.
        incrementWindowSize(CONNECTION_STREAM_ID, 100);
        assertEquals(DEFAULT_WINDOW_SIZE + 100, window(CONNECTION_STREAM_ID));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_A));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
        // No writability transitions: everything was already writable.
        verifyZeroInteractions(listener);
    }
@Test
public void windowUpdateShouldChangeStreamWindow() throws Http2Exception {
incrementWindowSize(STREAM_A, 100);
assertEquals(DEFAULT_WINDOW_SIZE, window(CONNECTION_STREAM_ID));
assertEquals(DEFAULT_WINDOW_SIZE + 100, window(STREAM_A));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
verifyZeroInteractions(listener);
}
    @Test
    public void payloadSmallerThanWindowShouldBeWrittenImmediately() throws Http2Exception {
        FakeFlowControlled data = new FakeFlowControlled(5);
        // Enqueueing alone must not write anything...
        sendData(STREAM_A, data);
        data.assertNotWritten();
        verifyZeroInteractions(listener);
        // ...only writePendingBytes() flushes the queued payload.
        controller.writePendingBytes();
        data.assertFullyWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 5);
        verifyZeroInteractions(listener);
    }
    @Test
    public void emptyPayloadShouldBeWrittenImmediately() throws Http2Exception {
        // A zero-byte payload consumes no window but must still be written.
        FakeFlowControlled data = new FakeFlowControlled(0);
        sendData(STREAM_A, data);
        data.assertNotWritten();
        controller.writePendingBytes();
        data.assertFullyWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 0);
        verifyZeroInteractions(listener);
    }
    @Test
    public void unflushedPayloadsShouldBeDroppedOnCancel() throws Http2Exception {
        FakeFlowControlled data = new FakeFlowControlled(5);
        sendData(STREAM_A, data);
        // Closing the stream before any flush must drop the queued payload;
        // subsequent writePendingBytes() calls never emit it.
        connection.stream(STREAM_A).close();
        controller.writePendingBytes();
        data.assertNotWritten();
        controller.writePendingBytes();
        data.assertNotWritten();
        // The close itself reports one writability change for the stream.
        verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
    }
@Test
public void payloadsShouldMerge() throws Http2Exception {
controller.initialWindowSize(15);
FakeFlowControlled data1 = new FakeFlowControlled(5, true);
FakeFlowControlled data2 = new FakeFlowControlled(10, true);
sendData(STREAM_A, data1);
sendData(STREAM_A, data2);
data1.assertNotWritten();
data1.assertNotWritten();
data2.assertMerged();
controller.writePendingBytes();
data1.assertFullyWritten();
data2.assertNotWritten();
verify(listener, times(1)).streamWritten(stream(STREAM_A), 15);
verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
assertFalse(controller.isWritable(stream(STREAM_A)));
}
    @Test
    public void stalledStreamShouldQueuePayloads() throws Http2Exception {
        // Collapse all stream windows so writes cannot proceed.
        controller.initialWindowSize(0);
        verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        reset(listener);
        FakeFlowControlled data = new FakeFlowControlled(15);
        FakeFlowControlled moreData = new FakeFlowControlled(0);
        // Both the non-empty and the empty payload must remain queued behind the
        // stalled window, and no further listener events are produced.
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        sendData(STREAM_A, moreData);
        controller.writePendingBytes();
        moreData.assertNotWritten();
        verifyZeroInteractions(listener);
    }
    @Test
    public void queuedPayloadsReceiveErrorOnStreamClose() throws Http2Exception {
        // Stall all streams so payloads queue up.
        controller.initialWindowSize(0);
        verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        reset(listener);
        FakeFlowControlled data = new FakeFlowControlled(15);
        FakeFlowControlled moreData = new FakeFlowControlled(0);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        sendData(STREAM_A, moreData);
        controller.writePendingBytes();
        moreData.assertNotWritten();
        // Closing the stream must fail every queued payload with an error rather
        // than silently dropping it.
        connection.stream(STREAM_A).close();
        data.assertError();
        moreData.assertError();
        verifyZeroInteractions(listener);
    }
    @Test
    public void payloadLargerThanWindowShouldWritePartial() throws Http2Exception {
        controller.initialWindowSize(5);
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        assertTrue(controller.isWritable(stream(STREAM_A)));
        reset(listener);
        // 10-byte payload against a 5-byte window: only 5 bytes can go out now.
        final FakeFlowControlled data = new FakeFlowControlled(10);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        // Verify that a partial frame of 5 remains to be sent
        data.assertPartiallyWritten(5);
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 5);
        // The partial write exhausts the stream window, flipping A to unwritable.
        verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        verifyNoMoreInteractions(listener);
    }
    @Test
    public void windowUpdateAndFlushShouldTriggerWrite() throws Http2Exception {
        controller.initialWindowSize(10);
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        assertTrue(controller.isWritable(stream(STREAM_A)));
        FakeFlowControlled data = new FakeFlowControlled(20);
        FakeFlowControlled moreData = new FakeFlowControlled(10);
        sendData(STREAM_A, data);
        sendData(STREAM_A, moreData);
        controller.writePendingBytes();
        // Only the first 10 bytes of `data` fit the initial window.
        data.assertPartiallyWritten(10);
        moreData.assertNotWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 10);
        verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        reset(listener);
        resetCtx();
        // Update the window and verify that the rest of data and some of moreData are written
        incrementWindowSize(STREAM_A, 15);
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        reset(listener);
        controller.writePendingBytes();
        data.assertFullyWritten();
        moreData.assertPartiallyWritten(5);
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 15);
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        // 25 bytes total were charged against the connection window; stream A's
        // window is again exhausted while B/C/D keep their 10-byte windows.
        assertEquals(DEFAULT_WINDOW_SIZE - 25, window(CONNECTION_STREAM_ID));
        assertEquals(0, window(STREAM_A));
        assertEquals(10, window(STREAM_B));
        assertEquals(10, window(STREAM_C));
        assertEquals(10, window(STREAM_D));
    }
    @Test
    public void initialWindowUpdateShouldSendPayload() throws Http2Exception {
        // Shrink the connection window to 10 so it is not the limiting factor later.
        incrementWindowSize(CONNECTION_STREAM_ID, -window(CONNECTION_STREAM_ID) + 10);
        assertWritabilityChanged(0, true);
        reset(listener);
        controller.initialWindowSize(0);
        assertWritabilityChanged(1, false);
        reset(listener);
        FakeFlowControlled data = new FakeFlowControlled(10);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        // Verify that the entire frame was sent.
        // Note: growing the initial window triggers the write directly, without an
        // explicit writePendingBytes() call.
        controller.initialWindowSize(10);
        data.assertFullyWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 10);
        assertWritabilityChanged(0, false);
    }
    @Test
    public void successiveSendsShouldNotInteract() throws Http2Exception {
        // Collapse the connection window to force queueing.
        incrementWindowSize(CONNECTION_STREAM_ID, -window(CONNECTION_STREAM_ID));
        assertEquals(0, window(CONNECTION_STREAM_ID));
        assertWritabilityChanged(1, false);
        reset(listener);
        FakeFlowControlled dataA = new FakeFlowControlled(10);
        // Queue data for stream A and allow most of it to be written.
        sendData(STREAM_A, dataA);
        controller.writePendingBytes();
        dataA.assertNotWritten();
        incrementWindowSize(CONNECTION_STREAM_ID, 8);
        assertWritabilityChanged(0, false);
        reset(listener);
        controller.writePendingBytes();
        // Only 8 of A's 10 bytes fit; the connection window is exhausted again.
        dataA.assertPartiallyWritten(8);
        assertEquals(65527, window(STREAM_A));
        assertEquals(0, window(CONNECTION_STREAM_ID));
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 8);
        assertWritabilityChanged(0, false);
        reset(listener);
        // Queue data for stream B and allow the rest of A and all of B to be written.
        FakeFlowControlled dataB = new FakeFlowControlled(10);
        sendData(STREAM_B, dataB);
        controller.writePendingBytes();
        dataB.assertNotWritten();
        incrementWindowSize(CONNECTION_STREAM_ID, 12);
        assertWritabilityChanged(0, false);
        reset(listener);
        controller.writePendingBytes();
        assertEquals(0, window(CONNECTION_STREAM_ID));
        assertWritabilityChanged(0, false);
        // Verify the rest of A is written.
        dataA.assertFullyWritten();
        assertEquals(65525, window(STREAM_A));
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 2);
        // B's payload is unaffected by A's earlier partial write.
        dataB.assertFullyWritten();
        assertEquals(65525, window(STREAM_B));
        verify(listener, times(1)).streamWritten(stream(STREAM_B), 10);
        verifyNoMoreInteractions(listener);
    }
@Test
public void negativeWindowShouldNotThrowException() throws Http2Exception {
final int initWindow = 20;
final int secondWindowSize = 10;
controller.initialWindowSize(initWindow);
assertWritabilityChanged(0, true);
reset(listener);
FakeFlowControlled data1 = new FakeFlowControlled(initWindow);
FakeFlowControlled data2 = new FakeFlowControlled(5);
// Deplete the stream A window to 0
sendData(STREAM_A, data1);
controller.writePendingBytes();
data1.assertFullyWritten();
verify(listener, times(1)).streamWritten(stream(STREAM_A), 20);
assertTrue(window(CONNECTION_STREAM_ID) > 0);
verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
reset(listener);
// Make the window size for stream A negative
controller.initialWindowSize(initWindow - secondWindowSize);
assertEquals(-secondWindowSize, window(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
reset(listener);
// Queue up a write. It should not be written now because the window is negative
sendData(STREAM_A, data2);
controller.writePendingBytes();
data2.assertNotWritten();
verify(listener, never()).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
reset(listener);
// Open the window size back up a bit (no send should happen)
incrementWindowSize(STREAM_A, 5);
controller.writePendingBytes();
assertEquals(-5, window(STREAM_A));
data2.assertNotWritten();
verify(listener, never()).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
reset(listener);
// Open the window size back up a bit (no send should happen)
incrementWindowSize(STREAM_A, 5);
controller.writePendingBytes();
assertEquals(0, window(STREAM_A));
data2.assertNotWritten();
verify(listener, never()).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
reset(listener);
// Open the window size back up and allow the write to happen
incrementWindowSize(STREAM_A, 5);
controller.writePendingBytes();
data2.assertFullyWritten();
verify(listener, never()).writabilityChanged(stream(STREAM_A));
verify(listener, never()).writabilityChanged(stream(STREAM_B));
verify(listener, never()).writabilityChanged(stream(STREAM_C));
verify(listener, never()).writabilityChanged(stream(STREAM_D));
assertFalse(controller.isWritable(stream(STREAM_A)));
assertTrue(controller.isWritable(stream(STREAM_B)));
assertTrue(controller.isWritable(stream(STREAM_C)));
assertTrue(controller.isWritable(stream(STREAM_D)));
}
    @Test
    public void initialWindowUpdateShouldSendEmptyFrame() throws Http2Exception {
        controller.initialWindowSize(0);
        assertWritabilityChanged(1, false);
        reset(listener);
        // First send a frame that will get buffered.
        FakeFlowControlled data = new FakeFlowControlled(10, false);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        // Now send an empty frame on the same stream and verify that it's also buffered.
        FakeFlowControlled data2 = new FakeFlowControlled(0, false);
        sendData(STREAM_A, data2);
        controller.writePendingBytes();
        data2.assertNotWritten();
        // Re-expand the window and verify that both frames were sent.
        controller.initialWindowSize(10);
        // B, C and D become writable again; A does not, because its 10 new bytes of
        // window are immediately consumed by the buffered frame.
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_B));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_C));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_D));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        assertTrue(controller.isWritable(stream(STREAM_B)));
        assertTrue(controller.isWritable(stream(STREAM_C)));
        assertTrue(controller.isWritable(stream(STREAM_D)));
        data.assertFullyWritten();
        data2.assertFullyWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 10);
    }
    @Test
    public void initialWindowUpdateShouldSendPartialFrame() throws Http2Exception {
        controller.initialWindowSize(0);
        assertWritabilityChanged(1, false);
        reset(listener);
        FakeFlowControlled data = new FakeFlowControlled(10);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        // Verify that a partial frame of 5 was sent.
        controller.initialWindowSize(5);
        // B, C and D become writable again; A stays unwritable because its 5 new
        // bytes of window are consumed immediately by the buffered frame.
        verify(listener, never()).writabilityChanged(stream(STREAM_A));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_B));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_C));
        verify(listener, times(1)).writabilityChanged(stream(STREAM_D));
        assertFalse(controller.isWritable(stream(STREAM_A)));
        assertTrue(controller.isWritable(stream(STREAM_B)));
        assertTrue(controller.isWritable(stream(STREAM_C)));
        assertTrue(controller.isWritable(stream(STREAM_D)));
        data.assertPartiallyWritten(5);
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 5);
    }
    @Test
    public void connectionWindowUpdateShouldSendFrame() throws Http2Exception {
        // Set the connection window size to zero.
        exhaustStreamWindow(CONNECTION_STREAM_ID);
        assertWritabilityChanged(1, false);
        reset(listener);
        FakeFlowControlled data = new FakeFlowControlled(10);
        sendData(STREAM_A, data);
        controller.writePendingBytes();
        data.assertNotWritten();
        assertWritabilityChanged(0, false);
        reset(listener);
        // Verify that the entire frame was sent.
        incrementWindowSize(CONNECTION_STREAM_ID, 10);
        assertWritabilityChanged(0, false);
        reset(listener);
        // The window update alone does not write; writePendingBytes() must be called.
        data.assertNotWritten();
        controller.writePendingBytes();
        data.assertFullyWritten();
        verify(listener, times(1)).streamWritten(stream(STREAM_A), 10);
        assertWritabilityChanged(0, false);
        // The 10 bytes are charged against both the connection window and stream A's window.
        assertEquals(0, window(CONNECTION_STREAM_ID));
        assertEquals(DEFAULT_WINDOW_SIZE - 10, window(STREAM_A));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
        assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
    }
/**
 * A connection-window increment smaller than the queued frame should result in a
 * partial write limited to the increment.
 */
@Test
public void connectionWindowUpdateShouldSendPartialFrame() throws Http2Exception {
    // Set the connection window size to zero.
    exhaustStreamWindow(CONNECTION_STREAM_ID);
    assertWritabilityChanged(1, false);
    reset(listener);
    FakeFlowControlled data = new FakeFlowControlled(10);
    sendData(STREAM_A, data);
    controller.writePendingBytes();
    data.assertNotWritten();
    // Verify that a partial frame of 5 was sent.
    incrementWindowSize(CONNECTION_STREAM_ID, 5);
    // The increment alone must not trigger a write until writePendingBytes().
    data.assertNotWritten();
    assertWritabilityChanged(0, false);
    reset(listener);
    controller.writePendingBytes();
    data.assertPartiallyWritten(5);
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 5);
    assertWritabilityChanged(0, false);
    // The partial write exhausts the connection window again; only STREAM_A is debited.
    assertEquals(0, window(CONNECTION_STREAM_ID));
    assertEquals(DEFAULT_WINDOW_SIZE - 5, window(STREAM_A));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
}
/**
 * A stream-window increment covering the whole queued frame should allow a full
 * write on the next flush, charging the connection window as well.
 */
@Test
public void streamWindowUpdateShouldSendFrame() throws Http2Exception {
    // Set the stream window size to zero.
    exhaustStreamWindow(STREAM_A);
    // Only STREAM_A becomes non-writable; the other streams are untouched.
    verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
    reset(listener);
    FakeFlowControlled data = new FakeFlowControlled(10);
    sendData(STREAM_A, data);
    controller.writePendingBytes();
    data.assertNotWritten();
    // Verify that the entire frame was sent.
    incrementWindowSize(STREAM_A, 10);
    // No writability events: STREAM_A's window now exactly matches its queued
    // bytes, so its reported writability state does not change.
    verify(listener, never()).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
    reset(listener);
    data.assertNotWritten();
    controller.writePendingBytes();
    data.assertFullyWritten();
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 10);
    // Writing the frame empties STREAM_A's window again, so still no new events.
    verify(listener, never()).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
    // The 10 bytes come out of the shared connection window and STREAM_A's window.
    assertEquals(DEFAULT_WINDOW_SIZE - 10, window(CONNECTION_STREAM_ID));
    assertEquals(0, window(STREAM_A));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
}
/**
 * A stream-window increment smaller than the queued frame should result in a
 * partial write limited to the increment.
 */
@Test
public void streamWindowUpdateShouldSendPartialFrame() throws Http2Exception {
    // Set the stream window size to zero.
    exhaustStreamWindow(STREAM_A);
    // Only STREAM_A becomes non-writable; the other streams are untouched.
    verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
    reset(listener);
    FakeFlowControlled data = new FakeFlowControlled(10);
    sendData(STREAM_A, data);
    controller.writePendingBytes();
    data.assertNotWritten();
    // Verify that a partial frame of 5 was sent.
    incrementWindowSize(STREAM_A, 5);
    // 5 < 10 queued bytes, so STREAM_A remains non-writable — no new events.
    verify(listener, never()).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
    reset(listener);
    data.assertNotWritten();
    controller.writePendingBytes();
    data.assertPartiallyWritten(5);
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 5);
    // Only the 5 written bytes are charged to the connection window and STREAM_A.
    assertEquals(DEFAULT_WINDOW_SIZE - 5, window(CONNECTION_STREAM_ID));
    assertEquals(0, window(STREAM_A));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_B));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_C));
    assertEquals(DEFAULT_WINDOW_SIZE, window(STREAM_D));
}
/**
 * When a FlowControlled payload throws during write, the controller must invoke
 * error(), skip writeComplete(), and still account for the bytes consumed before
 * the failure. The error callback closes the local side of the stream.
 */
@Test
public void flowControlledWriteThrowsAnException() throws Exception {
    // Mock reports sizes 100 -> 50 -> 20 and throws on the third write
    // (see mockedFlowControlledThatThrowsOnWrite).
    final Http2RemoteFlowController.FlowControlled flowControlled = mockedFlowControlledThatThrowsOnWrite();
    final Http2Stream stream = stream(STREAM_A);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocationOnMock) {
            stream.closeLocalSide();
            return null;
        }
    }).when(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    int windowBefore = window(STREAM_A);
    controller.addFlowControlled(stream, flowControlled);
    controller.writePendingBytes();
    verify(flowControlled, times(3)).write(any(ChannelHandlerContext.class), anyInt());
    verify(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    verify(flowControlled, never()).writeComplete();
    // 100 - 10 remaining at the throwing write = 90 bytes were consumed.
    assertEquals(90, windowBefore - window(STREAM_A));
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 90);
    assertWritabilityChanged(0, true);
}
/**
 * If both write() and the subsequent error() callback throw, the exception from
 * error() must propagate out of writePendingBytes(), while the bytes consumed
 * before the failure remain accounted for and the listener is never notified.
 */
@Test
public void flowControlledWriteAndErrorThrowAnException() throws Exception {
    // Mock reports sizes 100 -> 50 -> 20 and throws on the third write.
    final Http2RemoteFlowController.FlowControlled flowControlled = mockedFlowControlledThatThrowsOnWrite();
    final Http2Stream stream = stream(STREAM_A);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocationOnMock) {
            throw new RuntimeException("error failed");
        }
    }).when(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    int windowBefore = window(STREAM_A);
    boolean exceptionThrown = false;
    try {
        controller.addFlowControlled(stream, flowControlled);
        controller.writePendingBytes();
    } catch (RuntimeException e) {
        exceptionThrown = true;
    } finally {
        assertTrue(exceptionThrown);
    }
    verify(flowControlled, times(3)).write(any(ChannelHandlerContext.class), anyInt());
    verify(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    verify(flowControlled, never()).writeComplete();
    // 90 bytes had been consumed before the failure.
    assertEquals(90, windowBefore - window(STREAM_A));
    verifyZeroInteractions(listener);
}
/**
 * An exception thrown from writeComplete() must not propagate out of
 * writePendingBytes() and must not trigger the error() callback; the fully
 * written bytes are still reported to the listener.
 */
@Test
public void flowControlledWriteCompleteThrowsAnException() throws Exception {
    final Http2RemoteFlowController.FlowControlled flowControlled =
            Mockito.mock(Http2RemoteFlowController.FlowControlled.class);
    // Payload of 150 bytes, drained 50 bytes per write call (3 writes total).
    final AtomicInteger size = new AtomicInteger(150);
    doAnswer(new Answer<Integer>() {
        @Override
        public Integer answer(InvocationOnMock invocationOnMock) throws Throwable {
            return size.get();
        }
    }).when(flowControlled).size();
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
            size.addAndGet(-50);
            return null;
        }
    }).when(flowControlled).write(any(ChannelHandlerContext.class), anyInt());
    final Http2Stream stream = stream(STREAM_A);
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocationOnMock) {
            throw new RuntimeException("writeComplete failed");
        }
    }).when(flowControlled).writeComplete();
    int windowBefore = window(STREAM_A);
    try {
        controller.addFlowControlled(stream, flowControlled);
        controller.writePendingBytes();
    } catch (Exception e) {
        // The writeComplete failure must be contained by the controller.
        fail();
    }
    verify(flowControlled, times(3)).write(any(ChannelHandlerContext.class), anyInt());
    verify(flowControlled, never()).error(any(ChannelHandlerContext.class), any(Throwable.class));
    verify(flowControlled).writeComplete();
    assertEquals(150, windowBefore - window(STREAM_A));
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 150);
    assertWritabilityChanged(0, true);
}
/**
 * Closing the stream from inside the error() callback (after write() failed)
 * must be safe: zero bytes are reported written and only the failed stream's
 * writability changes.
 */
@Test
public void closeStreamInFlowControlledError() throws Exception {
    final Http2RemoteFlowController.FlowControlled flowControlled =
            Mockito.mock(Http2RemoteFlowController.FlowControlled.class);
    final Http2Stream stream = stream(STREAM_A);
    when(flowControlled.size()).thenReturn(100);
    // First (and only) write attempt fails immediately.
    doThrow(new RuntimeException("write failed"))
            .when(flowControlled).write(any(ChannelHandlerContext.class), anyInt());
    // The error handler closes the whole stream, exercising removal while
    // the controller is still processing it.
    doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocationOnMock) {
            stream.close();
            return null;
        }
    }).when(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    controller.addFlowControlled(stream, flowControlled);
    controller.writePendingBytes();
    verify(flowControlled).write(any(ChannelHandlerContext.class), anyInt());
    verify(flowControlled).error(any(ChannelHandlerContext.class), any(Throwable.class));
    verify(flowControlled, never()).writeComplete();
    // Nothing was written before the failure.
    verify(listener, times(1)).streamWritten(stream(STREAM_A), 0);
    verify(listener, times(1)).writabilityChanged(stream(STREAM_A));
    verify(listener, never()).writabilityChanged(stream(STREAM_B));
    verify(listener, never()).writabilityChanged(stream(STREAM_C));
    verify(listener, never()).writabilityChanged(stream(STREAM_D));
    assertFalse(controller.isWritable(stream(STREAM_A)));
    assertTrue(controller.isWritable(stream(STREAM_B)));
    assertTrue(controller.isWritable(stream(STREAM_C)));
    assertTrue(controller.isWritable(stream(STREAM_D)));
}
/**
 * While the channel itself is not writable, no flow-controlled frames may be
 * written regardless of window state; once the channel becomes writable again,
 * all queued frames flush.
 */
@Test
public void nonWritableChannelDoesNotAttemptToWrite() throws Exception {
    // Start the channel as not writable and exercise the public methods of the flow controller
    // making sure no frames are written.
    setChannelWritability(false);
    assertWritabilityChanged(1, false);
    reset(listener);
    FakeFlowControlled dataA = new FakeFlowControlled(1);
    FakeFlowControlled dataB = new FakeFlowControlled(1);
    final Http2Stream stream = stream(STREAM_A);
    controller.addFlowControlled(stream, dataA);
    controller.writePendingBytes();
    dataA.assertNotWritten();
    // Even with ample window, a non-writable channel blocks all writes.
    controller.incrementWindowSize(stream, 100);
    controller.writePendingBytes();
    dataA.assertNotWritten();
    controller.addFlowControlled(stream, dataB);
    controller.writePendingBytes();
    dataA.assertNotWritten();
    dataB.assertNotWritten();
    assertWritabilityChanged(0, false);
    // Now change the channel to writable and make sure frames are written.
    setChannelWritability(true);
    assertWritabilityChanged(1, true);
    controller.writePendingBytes();
    dataA.assertFullyWritten();
    dataB.assertFullyWritten();
}
/**
 * Frames queued before a ChannelHandlerContext is assigned must be flushed
 * (and writability re-evaluated) as soon as the context is set.
 */
@Test
public void contextShouldSendQueuedFramesWhenSet() throws Exception {
    // Re-initialize the controller so we can ensure the context hasn't been set yet.
    initConnectionAndController();
    FakeFlowControlled dataA = new FakeFlowControlled(1);
    final Http2Stream stream = stream(STREAM_A);
    // Queue some frames
    controller.addFlowControlled(stream, dataA);
    dataA.assertNotWritten();
    controller.incrementWindowSize(stream, 100);
    dataA.assertNotWritten();
    // Without a context, nothing is written and no writability events fire.
    assertWritabilityChanged(0, false);
    // Set the controller
    controller.channelHandlerContext(ctx);
    dataA.assertFullyWritten();
    assertWritabilityChanged(1, true);
}
/**
 * Queueing frames before the context is set must not throw; the queued frames
 * are written once the context arrives.
 */
@Test
public void initialWindowSizeWithNoContextShouldNotThrow() throws Exception {
    // Re-initialize the controller so we can ensure the context hasn't been set yet.
    initConnectionAndController();
    FakeFlowControlled dataA = new FakeFlowControlled(1);
    final Http2Stream stream = stream(STREAM_A);
    // Queue some frames
    controller.addFlowControlled(stream, dataA);
    dataA.assertNotWritten();
    // Set the controller
    controller.channelHandlerContext(ctx);
    dataA.assertFullyWritten();
}
/**
 * Asserts that every known stream reported exactly {@code amt} writability
 * events and currently shares the writability state {@code writable}.
 */
private void assertWritabilityChanged(int amt, boolean writable) {
    final int[] streamIds = { STREAM_A, STREAM_B, STREAM_C, STREAM_D };
    for (int id : streamIds) {
        verify(listener, times(amt)).writabilityChanged(stream(id));
    }
    for (int id : streamIds) {
        if (writable) {
            assertTrue(controller.isWritable(stream(id)));
        } else {
            assertFalse(controller.isWritable(stream(id)));
        }
    }
}
/**
 * Builds a mocked FlowControlled whose reported size shrinks across successive
 * write() calls (100 -> 50 -> 20) and which throws on the third write while
 * reporting 10 bytes still unwritten. Used to test error-path accounting.
 */
private static Http2RemoteFlowController.FlowControlled mockedFlowControlledThatThrowsOnWrite() throws Exception {
    final Http2RemoteFlowController.FlowControlled flowControlled =
            Mockito.mock(Http2RemoteFlowController.FlowControlled.class);
    when(flowControlled.size()).thenReturn(100);
    doAnswer(new Answer<Void>() {
        // Tracks how many times write() has been invoked; the stubbing is
        // re-pointed after each call so size() reflects the remaining bytes.
        private int invocationCount;
        @Override
        public Void answer(InvocationOnMock invocationOnMock) throws Throwable {
            switch(invocationCount) {
                case 0:
                    // First write consumes 50 bytes (100 -> 50 remaining).
                    when(flowControlled.size()).thenReturn(50);
                    invocationCount = 1;
                    return null;
                case 1:
                    // Second write consumes 30 bytes (50 -> 20 remaining).
                    when(flowControlled.size()).thenReturn(20);
                    invocationCount = 2;
                    return null;
                default:
                    // Third write fails with 10 bytes still unwritten,
                    // so 90 of the original 100 bytes count as consumed.
                    when(flowControlled.size()).thenReturn(10);
                    throw new RuntimeException("Write failed");
            }
        }
    }).when(flowControlled).write(any(ChannelHandlerContext.class), anyInt());
    return flowControlled;
}
/** Queues the given fake payload on the stream identified by {@code streamId}. */
private void sendData(int streamId, FakeFlowControlled data) throws Http2Exception {
    controller.addFlowControlled(stream(streamId), data);
}
/** Shrinks the given stream's flow-control window to exactly zero. */
private void exhaustStreamWindow(int streamId) throws Http2Exception {
    final int remaining = window(streamId);
    incrementWindowSize(streamId, -remaining);
}
/** Returns the controller's current window size for the given stream id. */
private int window(int streamId) throws Http2Exception {
    final Http2Stream target = stream(streamId);
    return controller.windowSize(target);
}
/** Adjusts the given stream's window by {@code delta} (may be negative). */
private void incrementWindowSize(int streamId, int delta) throws Http2Exception {
    final Http2Stream target = stream(streamId);
    controller.incrementWindowSize(target, delta);
}
/** Resolves a stream id to its {@link Http2Stream} on the connection under test. */
private Http2Stream stream(int streamId) {
    final Http2Stream resolved = connection.stream(streamId);
    return resolved;
}
/**
 * Clears all recorded interactions on the mocked context, then restores the
 * channel/executor stubs the controller depends on.
 */
private void resetCtx() {
    reset(ctx);
    when(ctx.executor()).thenReturn(executor);
    when(ctx.channel()).thenReturn(channel);
}
/**
 * Flips the mocked channel's writability and, when a controller exists,
 * propagates the change to it.
 */
private void setChannelWritability(boolean isWritable) throws Http2Exception {
    final long bytesBeforeUnwritable = isWritable ? Long.MAX_VALUE : 0L;
    when(channel.bytesBeforeUnwritable()).thenReturn(bytesBeforeUnwritable);
    when(channel.isWritable()).thenReturn(isWritable);
    if (controller == null) {
        return;
    }
    controller.channelWritabilityChanged();
}
/**
 * Test double for {@link Http2RemoteFlowController.FlowControlled} that tracks
 * how many of its bytes were written and exposes assertion helpers for tests.
 */
private static final class FakeFlowControlled implements Http2RemoteFlowController.FlowControlled {
    // Bytes still pending; shrinks as write() consumes them.
    private int currentSize;
    // Total payload size, grown when another payload is merged in.
    private int originalSize;
    // Whether write() has been invoked with a usable byte allowance.
    private boolean writeCalled;
    private final boolean mergeable;
    private boolean merged;
    // Last error reported via error(), if any.
    private Throwable t;

    private FakeFlowControlled(int size) {
        this(size, false);
    }

    private FakeFlowControlled(int size, boolean mergeable) {
        currentSize = size;
        originalSize = size;
        this.mergeable = mergeable;
    }

    @Override
    public int size() {
        return currentSize;
    }

    @Override
    public void error(ChannelHandlerContext ctx, Throwable t) {
        this.t = t;
    }

    @Override
    public void writeComplete() {
        // No-op for the fake.
    }

    @Override
    public void write(ChannelHandlerContext ctx, int allowedBytes) {
        if (allowedBytes <= 0 && currentSize != 0) {
            // Write has been called but no data can be written.
            return;
        }
        writeCalled = true;
        final int consumed = Math.min(currentSize, allowedBytes);
        currentSize -= consumed;
    }

    @Override
    public boolean merge(ChannelHandlerContext ctx, Http2RemoteFlowController.FlowControlled next) {
        if (!mergeable || !(next instanceof FakeFlowControlled)) {
            return false;
        }
        final FakeFlowControlled other = (FakeFlowControlled) next;
        originalSize += other.originalSize;
        currentSize += other.originalSize;
        other.merged = true;
        return true;
    }

    /** Bytes written so far. */
    public int written() {
        return originalSize - currentSize;
    }

    public void assertNotWritten() {
        assertFalse(writeCalled);
    }

    public void assertPartiallyWritten(int expectedWritten) {
        assertPartiallyWritten(expectedWritten, 0);
    }

    public void assertPartiallyWritten(int expectedWritten, int delta) {
        assertTrue(writeCalled);
        assertEquals(expectedWritten, written(), delta);
    }

    public void assertFullyWritten() {
        assertTrue(writeCalled);
        assertEquals(0, currentSize);
    }

    /** Returns whether this payload was merged into another one. */
    public boolean assertMerged() {
        return merged;
    }

    public void assertError() {
        assertNotNull(t);
    }
}
}
| |
/*
===========================================================================
Copyright (c) 2010 BrickRed Technologies Limited
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sub-license, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
===========================================================================
*/
package org.brickred.socialauth.provider;
import java.io.InputStream;
import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.brickred.socialauth.AbstractProvider;
import org.brickred.socialauth.AuthProvider;
import org.brickred.socialauth.Contact;
import org.brickred.socialauth.Permission;
import org.brickred.socialauth.Profile;
import org.brickred.socialauth.exception.AccessTokenExpireException;
import org.brickred.socialauth.exception.ProviderStateException;
import org.brickred.socialauth.exception.ServerDataException;
import org.brickred.socialauth.exception.SocialAuthConfigurationException;
import org.brickred.socialauth.exception.SocialAuthException;
import org.brickred.socialauth.exception.UserDeniedPermissionException;
import org.brickred.socialauth.oauthstrategy.OAuthStrategyBase;
import org.brickred.socialauth.util.AccessGrant;
import org.brickred.socialauth.util.BirthDate;
import org.brickred.socialauth.util.Constants;
import org.brickred.socialauth.util.HttpUtil;
import org.brickred.socialauth.util.MethodType;
import org.brickred.socialauth.util.OAuthConfig;
import org.brickred.socialauth.util.Response;
import org.brickred.socialauth.util.SocialAuthUtil;
import org.json.JSONArray;
import org.json.JSONObject;
public class YammerImpl extends AbstractProvider implements AuthProvider,
Serializable {
private static final long serialVersionUID = 8671863515161132392L;
private static final String AUTHORIZATION_URL = "https://www.yammer.com/dialog/oauth?client_id=%1$s&redirect_uri=%2$s";
private static final String ACCESS_TOKEN_URL = "https://www.yammer.com/oauth2/access_token.json?client_id=%1$s&client_secret=%2$s&code=%3$s";
private static final String UPDATE_STATUS_URL = "https://www.yammer.com/api/v1/messages.json";
private static final String PROFILE_URL = "https://www.yammer.com/api/v1/users/%1$s.json?access_token=%2$s";
private static final String CONTACTS_URL = "https://www.yammer.com/api/v1/users.json?sort_by=followers&access_token=%1$s";
private final Log LOG = LogFactory.getLog(YammerImpl.class);
private String accessToken;
private String successUrl;
private boolean isVerify;
private OAuthConfig config;
private Permission scope;
private AccessGrant accessGrant;
private Profile userProfile;
private String profileId;
private boolean providerState = false;
/**
* Stores configuration for the provider
*
* @param providerConfig
* It contains the configuration of application like consumer key
* and consumer secret
* @throws Exception
*/
public YammerImpl(final OAuthConfig providerConfig) throws Exception {
config = providerConfig;
if (config.getCustomPermissions() != null) {
scope = Permission.CUSTOM;
}
}
/**
* Stores access grant for the provider
*
* @param accessGrant
* It contains the access token and other information
* @throws AccessTokenExpireException
*/
@Override
public void setAccessGrant(final AccessGrant accessGrant)
throws AccessTokenExpireException {
this.accessGrant = accessGrant;
accessToken = accessGrant.getKey();
isVerify = true;
}
/**
* This is the most important action. It redirects the browser to an
* appropriate URL which will be used for authentication with the provider
* that has been set using setId()
*
* @throws Exception
*/
@Override
public String getLoginRedirectURL(final String successUrl) throws Exception {
LOG.info("Determining URL for redirection");
providerState = true;
try {
this.successUrl = URLEncoder.encode(successUrl, Constants.ENCODING);
} catch (UnsupportedEncodingException e) {
this.successUrl = successUrl;
}
String url = String.format(AUTHORIZATION_URL, config.get_consumerKey(),
this.successUrl);
String scopeStr = getScope();
if (scopeStr != null) {
url += "&scope=" + scopeStr;
}
LOG.info("Redirection to following URL should happen : " + url);
return url;
}
/**
* Verifies the user when the external provider redirects back to our
* application.
*
*
* @param requestParams
* request parameters, received from the provider
* @return Profile object containing the profile information
* @throws Exception
*/
@Override
public Profile verifyResponse(final Map<String, String> requestParams)
throws Exception {
return doVerifyResponse(requestParams);
}
/**
* @param requestParams
* @return
* @throws Exception
*/
private Profile doVerifyResponse(final Map<String, String> requestParams)
throws Exception {
LOG.info("Retrieving Access Token in verify response function");
if (requestParams.get("error") != null
&& "access_denied".equals(requestParams.get("error"))) {
throw new UserDeniedPermissionException();
}
if (!providerState) {
throw new ProviderStateException();
}
String code = requestParams.get("code");
if (code == null || code.length() == 0) {
throw new SocialAuthException("Verification code is null");
}
String url = String.format(ACCESS_TOKEN_URL, config.get_consumerKey(),
config.get_consumerSecret(), code);
LOG.debug("Verification Code : " + code);
Response response;
try {
response = HttpUtil.doHttpRequest(url, MethodType.GET.toString(),
null, null);
} catch (Exception e) {
throw new SocialAuthException("Error in url : " + e);
}
String result = null;
if (response.getStatus() == 200) {
try {
result = response.getResponseBodyAsString(Constants.ENCODING);
} catch (Exception exc) {
throw new SocialAuthException("Failed to parse response", exc);
}
}
if (result == null || result.length() == 0) {
String errorMessage = null;
try {
errorMessage = response
.getErrorStreamAsString(Constants.ENCODING);
} catch (Exception e) {
// do nothing
}
if (errorMessage == null) {
errorMessage = "Problem in getting Access Token. Application key or Secret key may be wrong."
+ "The server running the application should be same that was registered to get the keys.";
}
throw new SocialAuthConfigurationException(errorMessage);
}
JSONObject resp = new JSONObject(result);
JSONObject accessTokenObject = resp.getJSONObject("access_token");
accessToken = accessTokenObject.getString("token");
LOG.debug("Access Token : " + accessToken);
if (accessToken != null) {
isVerify = true;
accessGrant = new AccessGrant();
accessGrant.setKey(accessToken);
if (scope != null) {
accessGrant.setPermission(scope);
} else {
accessGrant.setPermission(Permission.ALL);
}
if (accessTokenObject.has("user_id")) {
profileId = accessTokenObject.getString("user_id");
}
accessGrant.setAttribute("profileId", profileId);
accessGrant.setProviderId(getProviderId());
return getProfile();
} else {
throw new SocialAuthException(
"Access token and expires not found from "
+ ACCESS_TOKEN_URL);
}
}
/**
* Gets the list of contacts of the user. this may not be available for all
* providers.
*
* @return List of contact objects representing Contacts. Only name will be
* available
*/
@Override
public List<Contact> getContactList() throws Exception {
if (!isVerify || accessToken == null) {
throw new SocialAuthException(
"Please call verifyResponse function first to get Access Token");
}
List<Contact> plist = new ArrayList<Contact>();
String contactURL = String.format(CONTACTS_URL, accessToken);
Map<String, String> headerParam = new HashMap<String, String>();
headerParam.put("Authorization", "Bearer " + accessToken);
LOG.info("Fetching contacts from " + contactURL);
String respStr;
try {
Response response = HttpUtil.doHttpRequest(contactURL,
MethodType.GET.toString(), null, headerParam);
respStr = response.getResponseBodyAsString(Constants.ENCODING);
} catch (Exception e) {
throw new SocialAuthException("Error while getting contacts from "
+ contactURL, e);
}
try {
LOG.debug("User Contacts list in json : " + respStr);
JSONArray resp = new JSONArray(respStr);
for (int i = 0; i < resp.length(); i++) {
JSONObject obj = resp.getJSONObject(i);
Contact p = new Contact();
String name = obj.getString("full_name");
p.setDisplayName(name);
JSONObject userContactDetails = obj.getJSONObject("contact");
JSONArray emailArr = userContactDetails
.getJSONArray("email_addresses");
JSONObject eobj = emailArr.getJSONObject(0);
if (eobj.has("address")) {
p.setEmail(eobj.optString("address", null));
}
p.setId(obj.optString("id", null));
p.setProfileUrl(obj.optString("web_url", null));
if (config.isSaveRawResponse()) {
p.setRawResponse(obj.toString());
}
plist.add(p);
}
} catch (Exception e) {
throw new ServerDataException(
"Failed to parse the user profile json : " + respStr, e);
}
return plist;
}
/**
* Updates the status on the chosen provider if available. This may not be
* implemented for all providers.
*
* @param msg
* Message to be shown as user's status
* @throws Exception
*/
@Override
public Response updateStatus(final String msg) throws Exception {
LOG.info("Updating status : " + msg);
if (!isVerify || accessToken == null) {
throw new SocialAuthException(
"Please call verifyResponse function first to get Access Token and then update status");
}
if (msg == null || msg.trim().length() == 0) {
throw new ServerDataException("Status cannot be blank");
}
Map<String, String> headerParam = new HashMap<String, String>();
headerParam.put("Authorization", "Bearer " + accessToken);
headerParam.put("Content-Type", "application/json");
headerParam.put("Accept", "application/json");
String msgBody = "{\"body\" : \"" + msg + "\"}";
Response serviceResponse;
try {
serviceResponse = HttpUtil.doHttpRequest(UPDATE_STATUS_URL,
MethodType.POST.toString(), msgBody, headerParam);
if (serviceResponse.getStatus() != 201) {
throw new SocialAuthException(
"Status not updated. Return Status code :"
+ serviceResponse.getStatus());
}
} catch (Exception e) {
throw new SocialAuthException(e);
}
return serviceResponse;
}
/**
* Logout
*/
@Override
public void logout() {
accessToken = null;
accessGrant = null;
}
/**
* @return
* @throws Exception
*/
private Profile getProfile() throws Exception {
if (!isVerify || accessToken == null) {
throw new SocialAuthException(
"Please call verifyResponse function first to get Access Token and then update status");
}
Profile p = new Profile();
Response serviceResponse;
if (profileId == null) {
profileId = (String) accessGrant.getAttribute("profileId");
}
String profileURL = String.format(PROFILE_URL, profileId, accessToken);
Map<String, String> headerParam = new HashMap<String, String>();
headerParam.put("Authorization", "Bearer " + accessToken);
try {
serviceResponse = HttpUtil.doHttpRequest(profileURL, "GET", null,
headerParam);
} catch (Exception e) {
throw new SocialAuthException(
"Failed to retrieve the user profile from " + profileURL,
e);
}
String result;
try {
result = serviceResponse
.getResponseBodyAsString(Constants.ENCODING);
LOG.debug("User Profile :" + result);
} catch (Exception e) {
throw new SocialAuthException("Failed to read response from "
+ profileURL, e);
}
try {
JSONObject resp = new JSONObject(result);
p.setFullName(resp.optString("full_name", null));
p.setLocation(resp.optString("location", null));
p.setProfileImageURL(resp.optString("mugshot_url", null));
if (resp.has("birth_date")) {
String dstr = resp.getString("birth_date");
if (dstr != null) {
String arr[] = dstr.split("\\s+");
BirthDate bd = new BirthDate();
if (arr.length == 1) {
Calendar currentDate = Calendar.getInstance();
bd.setMonth(currentDate.get(Calendar.MONTH) + 1);
bd.setDay(currentDate.get(Calendar.DAY_OF_MONTH));
} else {
if (arr.length > 0) {
bd.setDay(Integer.parseInt(arr[1]));
}
if (arr.length > 1) {
bd.setMonth(new Integer(SocialAuthUtil
.getMonthInInt(arr[0])));
}
}
p.setDob(bd);
}
}
JSONObject userContactDetails = resp.getJSONObject("contact");
JSONArray emailArr = userContactDetails
.getJSONArray("email_addresses");
JSONObject eobj = emailArr.getJSONObject(0);
if (eobj.has("address")) {
p.setEmail(eobj.optString("address", null));
}
p.setProviderId(getProviderId());
if (config.isSaveRawResponse()) {
p.setRawResponse(result);
}
userProfile = p;
return userProfile;
} catch (Exception e) {
throw new SocialAuthException(
"Failed to parse the user profile json : " + result, e);
}
}
/**
*
* @param p
* Permission object which can be Permission.AUHTHENTICATE_ONLY,
* Permission.ALL, Permission.DEFAULT
*/
@Override
public void setPermission(final Permission p) {
LOG.debug("Permission requested : " + p.toString());
this.scope = p;
}
/**
* Makes HTTP request to a given URL.It attaches access token in URL.
*
* @param url
* URL to make HTTP request.
* @param methodType
* Method type can be GET, POST or PUT
* @param params
* @param headerParams
* Parameters need to pass as Header Parameters
* @param body
* Request Body
* @return Response object
* @throws Exception
*/
@Override
public Response api(final String url, final String methodType,
final Map<String, String> params,
final Map<String, String> headerParams, final String body)
throws Exception {
if (!isVerify || accessToken == null) {
throw new SocialAuthException(
"Please call verifyResponse function first to get Access Token");
}
Map<String, String> headerParam = new HashMap<String, String>();
headerParam.put("Content-Type", "application/json");
headerParam.put("Accept", "application/json");
if (headerParams != null) {
headerParam.putAll(headerParams);
}
headerParam.put("Authorization", "Bearer " + accessToken);
Response serviceResponse;
LOG.debug("Calling URL : " + url);
LOG.debug("Header Params : " + headerParam.toString());
try {
serviceResponse = HttpUtil.doHttpRequest(url, methodType, body,
headerParam);
} catch (Exception e) {
throw new SocialAuthException(
"Error while making request to URL : " + url, e);
}
if (serviceResponse.getStatus() != 200
&& serviceResponse.getStatus() != 201) {
LOG.debug("Return statuc for URL " + url + " is "
+ serviceResponse.getStatus());
throw new SocialAuthException("Error while making request to URL :"
+ url + "Status : " + serviceResponse.getStatus());
}
return serviceResponse;
}
/**
* Retrieves the user profile.
*
* @return Profile object containing the profile information.
*/
@Override
public Profile getUserProfile() throws Exception {
if (userProfile == null && accessToken != null) {
this.getProfile();
}
return userProfile;
}
@Override
public AccessGrant getAccessGrant() {
return accessGrant;
}
/**
 * Returns the provider id as configured for this connector.
 */
@Override
public String getProviderId() {
	return config.getId();
}
/**
 * Image upload is not supported by this provider.
 *
 * @throws SocialAuthException always — Yammer upload is not implemented
 */
@Override
public Response uploadImage(final String message, final String fileName,
		final InputStream inputStream) throws Exception {
	LOG.warn("WARNING: Not implemented for Yammer");
	throw new SocialAuthException(
			"Upload Image is not implemented for Yammer");
}
/**
 * Returns the scope string to request during authorization.  Only the
 * CUSTOM permission carries a scope (the configured custom permissions);
 * every other permission yields no scope.
 */
private String getScope() {
	return Permission.CUSTOM.equals(scope) ? config.getCustomPermissions()
			: null;
}
/**
 * Builds the list of plugin class names registered for this provider.
 *
 * @return a mutable list; empty when no plugins are registered
 */
@Override
protected List<String> getPluginsList() {
	final List<String> plugins = new ArrayList<String>();
	final String[] registered = config.getRegisteredPlugins();
	if (registered != null && registered.length > 0) {
		plugins.addAll(Arrays.asList(registered));
	}
	return plugins;
}
/**
 * No OAuth strategy object is exposed for this provider.
 *
 * @return always null
 */
@Override
protected OAuthStrategyBase getOauthStrategy() {
	return null;
}
}
| |
// GreenPOS is a point of sales application designed for touch screens.
// Copyright (C) 2007-2009 Openbravo, S.L.
// http://code.google.com/p/openbravocustom/
//
// This file is part of GreenPOS.
//
// GreenPOS is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// GreenPOS is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with GreenPOS. If not, see <http://www.gnu.org/licenses/>.
package mappoieditor;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.geom.AffineTransform;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import javax.imageio.ImageIO;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.filechooser.FileFilter;
/**
 * Swing panel that loads an image from disk, displays it inside a scroll
 * pane and lets the user zoom in and out with toolbar buttons.  The current
 * zoom factor is shown as a percentage label.  Changing the image or the
 * zoom fires the bound properties "image" and "zoom".
 */
public class JImageEditor extends javax.swing.JPanel {
    // Optional maximum dimensions for loaded images; null means no limit.
    private Dimension m_maxsize;
    // Icon wrapper that paints the current image scaled by the zoom factor.
    private ZoomIcon m_icon;
    // The image being edited, or null when none is loaded.
    private BufferedImage m_Img = null;
    // Last directory used in the file chooser, shared by all instances.
    private static File m_fCurrentDirectory = null;
    // Renders the zoom factor as a percentage, e.g. "100%".
    private static NumberFormat m_percentformat = new DecimalFormat(
            "#,##0.##%");
    /** Creates new form JImageEditor */
    public JImageEditor() {
        initComponents();
        m_Img = null;
        m_maxsize = null;
        m_icon = new ZoomIcon();
        m_jImage.setIcon(m_icon);
        m_jPercent.setText(m_percentformat.format(m_icon.getZoom()));
        privateSetEnabled(isEnabled());
    }
    /** Sets the maximum dimensions allowed for a loaded image. */
    public void setMaxDimensions(Dimension size) {
        m_maxsize = size;
    }
    /** Returns the maximum dimensions allowed, or null when unlimited. */
    public Dimension getMaxDimensions() {
        return m_maxsize;
    }
    // Enables/disables the whole panel including the toolbar buttons.
    public void setEnabled(boolean value) {
        privateSetEnabled(value);
        super .setEnabled(value);
    }
    // Applies the enabled state to the children; most controls are only
    // active when an image is actually loaded.
    private void privateSetEnabled(boolean value) {
        m_jbtnopen.setEnabled(value);
        m_jbtnclose.setEnabled(value && (m_Img != null));
        m_jbtnzoomin.setEnabled(value && (m_Img != null));
        m_jbtnzoomout.setEnabled(value && (m_Img != null));
        m_jPercent.setEnabled(value && (m_Img != null));
        m_jScr.setEnabled(value && (m_Img != null));
    }
    /**
     * Replaces the current image (null clears it), resets the zoom display,
     * refreshes the viewport and fires the "image" property change.
     */
    public void setImage(BufferedImage img) {
        BufferedImage oldimg = m_Img;
        m_Img = img;
        m_icon.setIcon(m_Img == null ? null : new ImageIcon(m_Img));
        m_jPercent.setText(m_percentformat.format(m_icon.getZoom()));
        m_jImage.revalidate();
        m_jScr.revalidate();
        m_jScr.repaint();
        privateSetEnabled(isEnabled());
        firePropertyChange("image", oldimg, m_Img);
    }
    /** Returns the currently loaded image, or null. */
    public BufferedImage getImage() {
        return m_Img;
    }
    /** Returns the current zoom factor (1.0 = 100%). */
    public double getZoom() {
        return m_icon.getZoom();
    }
    /**
     * Sets the zoom factor, updates the percentage label and viewport, and
     * fires the "zoom" property change.
     */
    public void setZoom(double zoom) {
        double oldzoom = m_icon.getZoom();
        m_icon.setZoom(zoom);
        m_jPercent.setText(m_percentformat.format(m_icon.getZoom()));
        m_jImage.revalidate();
        m_jScr.revalidate();
        m_jScr.repaint();
        firePropertyChange("zoom", oldzoom, zoom);
    }
    /** Doubles the zoom factor, capped at 8x (800%). */
    public void incZoom() {
        double zoom = m_icon.getZoom();
        setZoom(zoom > 4.0 ? 8.0 : zoom * 2.0);
    }
    /** Halves the zoom factor, floored at 0.25x (25%). */
    public void decZoom() {
        double zoom = m_icon.getZoom();
        setZoom(zoom < 0.5 ? 0.25 : zoom / 2.0);
    }
    /**
     * Shows a file chooser and loads the selected image.  If the image
     * exceeds the configured maximum dimensions, the user is asked whether
     * it should be resized to fit.
     */
    public void doLoad() {
        JFileChooser fc = new JFileChooser(m_fCurrentDirectory);
        if (fc.showOpenDialog(this ) == JFileChooser.APPROVE_OPTION) {
            try {
                BufferedImage img = ImageIO.read(fc.getSelectedFile());
                if (img != null) {
                    // Check that the image does not exceed the maximum size.
                    if (m_maxsize != null
                            && (img.getHeight() > m_maxsize.height || img
                                    .getWidth() > m_maxsize.width)) {
                        if (JOptionPane
                                .showConfirmDialog(
                                        this ,
                                        "resize image",
                                        "editor",
                                        JOptionPane.YES_NO_OPTION,
                                        JOptionPane.QUESTION_MESSAGE) == JOptionPane.YES_OPTION) {
                            // Resize the image so that it fits.
                            img = resizeImage(img);
                        }
                    }
                    setImage(img);
                    m_fCurrentDirectory = fc.getCurrentDirectory();
                }
            } catch (IOException eIO) {
                // NOTE(review): read errors are silently swallowed —
                // consider reporting the failure to the user.
            }
        }
    }
    /**
     * Scales the image down (preserving aspect ratio) so it fits inside
     * m_maxsize, centering it on a transparent canvas of the target size.
     */
    private BufferedImage resizeImage(BufferedImage img) {
        int myheight = img.getHeight();
        int mywidth = img.getWidth();
        // Shrink to the maximum height first, then to the maximum width,
        // keeping the aspect ratio each time.
        if (myheight > m_maxsize.height) {
            mywidth = (int) (mywidth * m_maxsize.height / myheight);
            myheight = m_maxsize.height;
        }
        if (mywidth > m_maxsize.width) {
            myheight = (int) (myheight * m_maxsize.width / mywidth);
            mywidth = m_maxsize.width;
        }
        BufferedImage thumb = new BufferedImage(mywidth, myheight,
                BufferedImage.TYPE_4BYTE_ABGR);
        double scalex = (double) mywidth / (double) img.getWidth(null);
        double scaley = (double) myheight
                / (double) img.getHeight(null);
        Graphics2D g2d = thumb.createGraphics();
        g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                RenderingHints.VALUE_ANTIALIAS_ON);
        //g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC);
        g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g2d.setColor(new Color(0, 0, 0, 0)); // Transparent
        g2d.fillRect(0, 0, mywidth, myheight);
        // Draw scaled by the smaller factor, centered along the other axis.
        if (scalex < scaley) {
            g2d.drawImage(img, 0, (int) ((myheight - img
                    .getHeight(null)
                    * scalex) / 2.0), mywidth, (int) (img
                    .getHeight(null) * scalex), null);
        } else {
            g2d
                    .drawImage(img, (int) ((mywidth - img
                            .getWidth(null)
                            * scaley) / 2.0), 0, (int) (img
                            .getWidth(null) * scaley), myheight, null);
        }
        g2d.dispose();
        return thumb;
    }
    /**
     * Icon decorator that paints a delegate icon scaled by a zoom factor
     * and reports the scaled size as its own.
     */
    private static class ZoomIcon implements Icon {
        // Delegate icon being scaled; null paints nothing.
        private Icon ico;
        // Current scale factor (1.0 = original size).
        private double zoom;
        public ZoomIcon() {
            this .ico = null;
            this .zoom = 1.0;
        }
        public int getIconHeight() {
            return ico == null ? 0 : (int) (zoom * ico.getIconHeight());
        }
        public int getIconWidth() {
            return ico == null ? 0 : (int) (zoom * ico.getIconWidth());
        }
        public void paintIcon(Component c, Graphics g, int x, int y) {
            if (ico != null) {
                Graphics2D g2d = (Graphics2D) g;
                g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING,
                        RenderingHints.VALUE_ANTIALIAS_ON);
                g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                        RenderingHints.VALUE_INTERPOLATION_BILINEAR);
                // Scale the graphics context, paint at the unscaled origin,
                // then restore the previous transform.
                AffineTransform oldt = g2d.getTransform();
                g2d.transform(AffineTransform.getScaleInstance(zoom,
                        zoom));
                ico.paintIcon(c, g2d, (int) (x / zoom),
                        (int) (y / zoom));
                g2d.setTransform(oldt);
            }
        }
        /** Sets a new delegate icon and resets the zoom back to 100%. */
        public void setIcon(Icon ico) {
            this .ico = ico;
            this .zoom = 1.0;
        }
        public void setZoom(double zoom) {
            this .zoom = zoom;
        }
        public double getZoom() {
            return zoom;
        }
    }
    /**
     * File-chooser filter that accepts directories plus any file whose
     * extension matches one of the given extensions (case-insensitive).
     * NOTE(review): not referenced from the visible code of this class —
     * presumably used by callers or left over; confirm before removing.
     */
    private static class ExtensionsFilter extends FileFilter {
        // Description shown in the chooser's filter combo box.
        private String message;
        // Accepted file extensions, without the leading dot.
        private String[] extensions;
        public ExtensionsFilter(String message, String... extensions) {
            this .message = message;
            this .extensions = extensions;
        }
        public boolean accept(java.io.File f) {
            if (f.isDirectory()) {
                return true;
            } else {
                String sFileName = f.getName();
                int ipos = sFileName.lastIndexOf('.');
                if (ipos >= 0) {
                    String sExt = sFileName.substring(ipos + 1);
                    for (String s : extensions) {
                        if (s.equalsIgnoreCase(sExt)) {
                            return true;
                        }
                    }
                }
                return false;
            }
        }
        public String getDescription() {
            return message;
        }
    }
    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        m_jScr = new javax.swing.JScrollPane();
        m_jImage = new javax.swing.JLabel();
        jPanel1 = new javax.swing.JPanel();
        jPanel2 = new javax.swing.JPanel();
        m_jbtnopen = new javax.swing.JButton();
        m_jbtnclose = new javax.swing.JButton();
        m_jbtnzoomin = new javax.swing.JButton();
        m_jPercent = new javax.swing.JLabel();
        m_jbtnzoomout = new javax.swing.JButton();
        setLayout(new java.awt.BorderLayout());
        m_jImage
                .setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        m_jImage
                .setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        m_jScr.setViewportView(m_jImage);
        add(m_jScr, java.awt.BorderLayout.CENTER);
        jPanel1.setLayout(new java.awt.BorderLayout());
        jPanel2.setBorder(javax.swing.BorderFactory.createEmptyBorder(
                0, 5, 0, 5));
        jPanel2.setLayout(new java.awt.GridLayout(0, 1, 0, 5));
        m_jbtnopen.setIcon(new javax.swing.ImageIcon(getClass()
                .getResource("/com/openbravo/images/fileopen.png"))); // NOI18N
        m_jbtnopen
                .addActionListener(new java.awt.event.ActionListener() {
                    public void actionPerformed(
                            java.awt.event.ActionEvent evt) {
                        m_jbtnopenActionPerformed(evt);
                    }
                });
        jPanel2.add(m_jbtnopen);
        m_jbtnclose.setIcon(new javax.swing.ImageIcon(getClass()
                .getResource("/com/openbravo/images/fileclose.png"))); // NOI18N
        m_jbtnclose
                .addActionListener(new java.awt.event.ActionListener() {
                    public void actionPerformed(
                            java.awt.event.ActionEvent evt) {
                        m_jbtncloseActionPerformed(evt);
                    }
                });
        jPanel2.add(m_jbtnclose);
        m_jbtnzoomin.setIcon(new javax.swing.ImageIcon(getClass()
                .getResource("/com/openbravo/images/viewmag+.png"))); // NOI18N
        m_jbtnzoomin
                .addActionListener(new java.awt.event.ActionListener() {
                    public void actionPerformed(
                            java.awt.event.ActionEvent evt) {
                        m_jbtnzoominActionPerformed(evt);
                    }
                });
        jPanel2.add(m_jbtnzoomin);
        m_jPercent.setBackground(java.awt.Color.white);
        m_jPercent
                .setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
        m_jPercent.setBorder(javax.swing.BorderFactory
                .createCompoundBorder(javax.swing.BorderFactory
                        .createLineBorder(javax.swing.UIManager
                                .getDefaults().getColor(
                                        "Button.darkShadow")),
                        javax.swing.BorderFactory.createEmptyBorder(1,
                                4, 1, 4)));
        m_jPercent.setOpaque(true);
        jPanel2.add(m_jPercent);
        m_jbtnzoomout.setIcon(new javax.swing.ImageIcon(getClass()
                .getResource("/com/openbravo/images/viewmag-.png"))); // NOI18N
        m_jbtnzoomout
                .addActionListener(new java.awt.event.ActionListener() {
                    public void actionPerformed(
                            java.awt.event.ActionEvent evt) {
                        m_jbtnzoomoutActionPerformed(evt);
                    }
                });
        jPanel2.add(m_jbtnzoomout);
        jPanel1.add(jPanel2, java.awt.BorderLayout.NORTH);
        add(jPanel1, java.awt.BorderLayout.LINE_END);
    }// </editor-fold>//GEN-END:initComponents
    private void m_jbtnzoomoutActionPerformed(
            java.awt.event.ActionEvent evt) {//GEN-FIRST:event_m_jbtnzoomoutActionPerformed
        decZoom();
    }//GEN-LAST:event_m_jbtnzoomoutActionPerformed
    private void m_jbtnzoominActionPerformed(
            java.awt.event.ActionEvent evt) {//GEN-FIRST:event_m_jbtnzoominActionPerformed
        incZoom();
    }//GEN-LAST:event_m_jbtnzoominActionPerformed
    private void m_jbtncloseActionPerformed(
            java.awt.event.ActionEvent evt) {//GEN-FIRST:event_m_jbtncloseActionPerformed
        setImage(null);
    }//GEN-LAST:event_m_jbtncloseActionPerformed
    private void m_jbtnopenActionPerformed(
            java.awt.event.ActionEvent evt) {//GEN-FIRST:event_m_jbtnopenActionPerformed
        doLoad();
    }//GEN-LAST:event_m_jbtnopenActionPerformed
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JLabel m_jImage;
    private javax.swing.JLabel m_jPercent;
    private javax.swing.JScrollPane m_jScr;
    private javax.swing.JButton m_jbtnclose;
    private javax.swing.JButton m_jbtnopen;
    private javax.swing.JButton m_jbtnzoomin;
    private javax.swing.JButton m_jbtnzoomout;
    // End of variables declaration//GEN-END:variables
}
| |
/*******************************************************************************
* Copyright 2014 xisberto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.xisberto.work_schedule.alarm;
import java.io.IOException;
import net.xisberto.work_schedule.R;
import net.xisberto.work_schedule.database.Period;
import net.xisberto.work_schedule.settings.Settings;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.res.Configuration;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.Vibrator;
import android.support.v4.app.NotificationCompat;
import android.support.v4.view.MotionEventCompat;
import android.text.format.DateFormat;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
import android.view.animation.TranslateAnimation;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import com.actionbarsherlock.app.SherlockFragmentActivity;
/**
 * Full-screen activity shown when a work-schedule alarm fires.  It plays
 * the configured ringtone, optionally vibrates, posts a notification with
 * snooze/dismiss actions, and lets the user drag the top frame to snooze
 * or the bottom frame to dismiss the alarm.
 */
public class AlarmMessageActivity extends SherlockFragmentActivity implements
        OnTouchListener {
    // Intent extras and actions used to show, dismiss or snooze the alarm.
    public static final String EXTRA_TIME = "time",
            EXTRA_PERIOD_ID = "period_id",
            ACTION_SHOW_ALARM = "net.xisberto.workschedule.show_alarm",
            ACTION_DISMISS_ALARM = "net.xisberto.workschedule.dismiss_alarm",
            ACTION_SNOOZE_ALARM = "net.xisberto.workschedule.snooze_alarm";
    // Request codes for the notification's dismiss/snooze PendingIntents.
    private static final int REQ_DISMISS = 1, REQ_SNOOZE = 2;
    // Plays the alarm sound; null when no sound is active.
    private MediaPlayer mMediaPlayer;
    // Preference id of the period whose alarm is currently showing.
    private int period_pref_id;
    private Settings settings;
    // Touch-drag state for the snooze/dismiss gesture.
    private float initialPoint;
    private float currentPoint;
    private boolean moving;
    // Animates the drag hint views while the alarm is displayed.
    private HinterThread hinter;
    private net.xisberto.work_schedule.database.Period period;
    // Get an alarm sound. Try for saved user option. If none set, try default
    // alarm, notification, or ringtone.
    private Uri getAlarmUri() {
        String ringtone = settings.getRingtone();
        Uri alert = null;
        if (ringtone != null) {
            alert = Uri.parse(ringtone);
        } else {
            alert = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_ALARM);
            if (alert == null) {
                alert = RingtoneManager
                        .getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
                if (alert == null) {
                    alert = RingtoneManager
                            .getDefaultUri(RingtoneManager.TYPE_RINGTONE);
                }
            }
        }
        return alert;
    }
    // Configures the MediaPlayer to loop the alarm sound on the ALARM
    // stream.  If that stream is muted, prepare() is skipped entirely.
    private void prepareSound(Context context, Uri alert) {
        mMediaPlayer = new MediaPlayer();
        mMediaPlayer.setWakeMode(context, PowerManager.PARTIAL_WAKE_LOCK);
        try {
            mMediaPlayer.setDataSource(context, alert);
            final AudioManager audioManager = (AudioManager) context
                    .getSystemService(Context.AUDIO_SERVICE);
            if (audioManager.getStreamVolume(AudioManager.STREAM_ALARM) != 0) {
                mMediaPlayer.setAudioStreamType(AudioManager.STREAM_ALARM);
                mMediaPlayer.setLooping(true);
                mMediaPlayer.prepare();
            }
        } catch (IOException e) {
            // NOTE(review): failure is only printed to stdout, leaving the
            // alarm silent — consider proper logging / user feedback.
            System.out.println("OOPS");
        }
    }
    // Stops and releases the player; safe to call when no sound is active.
    private void stopSound() {
        if (mMediaPlayer != null) {
            mMediaPlayer.stop();
            mMediaPlayer.release();
            mMediaPlayer = null;
        }
    }
    // Stops both the alarm sound and any ongoing vibration.
    private void stopSoundVibrator() {
        stopSound();
        ((Vibrator) getSystemService(VIBRATOR_SERVICE)).cancel();
    }
    // Disables the period's alarm, persists it, silences the device and
    // closes this activity.
    private void cancelAlarm() {
        period.enabled = false;
        period.setAlarm(this, true);
        period.persist(this);
        stopSoundVibrator();
        finish();
    }
    // Re-arms the alarm after the configured snooze increment, shows a
    // confirmation toast, silences the device and closes this activity.
    private void snoozeAlarm() {
        settings = Settings.getInstance(getApplicationContext());
        period.addTime(settings.getCalendar(R.string.key_snooze_increment));
        period.enabled = true;
        period.setAlarm(this, true);
        period.persist(this);
        Toast.makeText(
                this,
                getResources().getString(R.string.snooze_set_to) + " "
                        + period.formatTime(DateFormat.is24HourFormat(this)),
                Toast.LENGTH_SHORT).show();
        stopSoundVibrator();
        finish();
    }
    // Posts a notification for the ringing alarm with tap-to-show,
    // swipe-to-dismiss, and explicit snooze/dismiss action buttons.
    private void showNotification() {
        Intent alarmIntent = new Intent(this, AlarmMessageActivity.class);
        alarmIntent.setAction(AlarmMessageActivity.ACTION_SHOW_ALARM);
        alarmIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK
                | Intent.FLAG_ACTIVITY_EXCLUDE_FROM_RECENTS);
        PendingIntent alarmSender = PendingIntent.getActivity(this,
                period.getId(), alarmIntent, PendingIntent.FLAG_CANCEL_CURRENT);
        Intent dismissIntent = new Intent(this, AlarmMessageActivity.class)
                .setFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT).setAction(
                        ACTION_DISMISS_ALARM);
        PendingIntent dismissSender = PendingIntent.getActivity(this,
                REQ_DISMISS, dismissIntent, PendingIntent.FLAG_CANCEL_CURRENT);
        Intent snoozeIntent = new Intent(this, AlarmMessageActivity.class)
                .setFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT).setAction(
                        ACTION_SNOOZE_ALARM);
        PendingIntent snoozeSender = PendingIntent.getActivity(this,
                REQ_SNOOZE, snoozeIntent, PendingIntent.FLAG_CANCEL_CURRENT);
        NotificationManager nm = (NotificationManager) this
                .getSystemService(Context.NOTIFICATION_SERVICE);
        Notification notification = new NotificationCompat.Builder(this)
                .setSmallIcon(R.drawable.ic_stat_notification)
                .setContentTitle(this.getString(period.getLabelId()))
                .setTicker(this.getString(period.getLabelId()))
                .setWhen(period.time.getTimeInMillis())
                .setOngoing(false)
                .setOnlyAlertOnce(true)
                .setContentIntent(alarmSender)
                .setDeleteIntent(dismissSender)
                .addAction(R.drawable.ic_snooze, getString(R.string.snooze),
                        snoozeSender)
                .addAction(R.drawable.ic_dismiss, getString(R.string.dismiss),
                        dismissSender).build();
        nm.notify(period.getId(), notification);
    }
    // Cancels the alarm notification.
    // NOTE(review): showNotification() posts under period.getId() but this
    // cancels under period_pref_id — assumes the two ids are equal; confirm.
    private void dismissNotification() {
        NotificationManager nm = (NotificationManager) this
                .getSystemService(Context.NOTIFICATION_SERVICE);
        nm.cancel(period_pref_id);
    }
    // Treats screens wider than 600dp as "large" (tablet layout).
    private boolean isLargeScreen() {
        DisplayMetrics metrics = new DisplayMetrics();
        getWindowManager().getDefaultDisplay().getMetrics(metrics);
        Log.d(getPackageName(), "X size: " + metrics.widthPixels
                / metrics.density);
        Log.d(getPackageName(), "Y size: " + metrics.heightPixels
                / metrics.density);
        return (metrics.widthPixels / metrics.density > 600f);
    }
    // Locks orientation: landscape on large screens, portrait otherwise.
    // NOTE(review): setRequestedOrientation() expects
    // ActivityInfo.SCREEN_ORIENTATION_* constants; the Configuration.*
    // values passed here have different numeric meanings — verify this
    // produces the intended orientation on all devices.
    private void setOrientation() {
        if (isLargeScreen()) {
            setRequestedOrientation(Configuration.ORIENTATION_LANDSCAPE);
        } else {
            setRequestedOrientation(Configuration.ORIENTATION_PORTRAIT);
        }
    }
    // Starts the thread that animates the top/bottom drag hint views.
    private void startHinter() {
        View hinter_top = findViewById(R.id.hinter_top);
        View hinter_bottom = findViewById(R.id.hinter_bottom);
        hinter = new HinterThread(hinter_top, hinter_bottom);
        hinter.start();
    }
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        overridePendingTransition(R.anim.activity_open_enter,
                R.anim.activity_close_exit);
        // Show over the lock screen and wake the display for the alarm.
        getWindow().addFlags(
                WindowManager.LayoutParams.FLAG_DISMISS_KEYGUARD
                        | WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED
                        | WindowManager.LayoutParams.FLAG_TURN_SCREEN_ON);
        setContentView(R.layout.activity_alarm_message);
        period_pref_id = getIntent().getIntExtra(EXTRA_PERIOD_ID,
                R.string.fstp_entrance);
        Log.d("AlarmMessage", "showing alarm for " + period_pref_id);
        // The ringing period is disabled immediately so it will not fire
        // again unless snoozed.
        period = Period.getPeriod(this, period_pref_id);
        period.enabled = false;
        period.persist(this);
        Log.d("AlarmMessage", "time is " + period.formatTime(true));
        settings = Settings.getInstance(getApplicationContext());
        String time = period.formatTime(DateFormat.is24HourFormat(this));
        ((TextView) findViewById(R.id.txt_alarm_label)).setText(period
                .getLabelId());
        ((TextView) findViewById(R.id.txt_alarm_time)).setText(time);
        initialPoint = 0f;
        currentPoint = 0f;
        moving = false;
        findViewById(R.id.frame_top).setOnTouchListener(this);
        findViewById(R.id.frame_bottom).setOnTouchListener(this);
        setOrientation();
        prepareSound(getApplicationContext(), getAlarmUri());
        if (settings.getVibrate()) {
            ((Vibrator) getSystemService(VIBRATOR_SERVICE)).vibrate(new long[] {
                    500, 500 }, 0);
        }
        showNotification();
    }
    @Override
    protected void onNewIntent(Intent intent) {
        super.onNewIntent(intent);
        // Dispatch notification actions to the matching handler.
        // NOTE(review): cancelAlarm()/snoozeAlarm() already call finish();
        // the extra finish() here is redundant but harmless.
        if (ACTION_DISMISS_ALARM.equals(intent.getAction())) {
            cancelAlarm();
            dismissNotification();
            finish();
        } else if (ACTION_SNOOZE_ALARM.equals(intent.getAction())) {
            snoozeAlarm();
            dismissNotification();
            finish();
        }
    }
    @Override
    protected void onResume() {
        super.onResume();
        // Resume the alarm sound (if prepared) and the drag hints.
        if (mMediaPlayer != null && !mMediaPlayer.isPlaying()) {
            mMediaPlayer.start();
        }
        startHinter();
    }
    @Override
    protected void onPause() {
        super.onPause();
        hinter.interrupt();
        if (isFinishing()) {
            overridePendingTransition(R.anim.activity_open_enter,
                    R.anim.activity_close_exit);
        }
    }
    @Override
    protected void onStop() {
        super.onStop();
        // Only clean up when the activity is really going away, not on a
        // configuration change.
        if (isFinishing()) {
            dismissNotification();
            stopSoundVibrator();
        }
    }
    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        setOrientation();
    }
    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
        // Volume-down/mute silence the alarm without dismissing it.
        switch (event.getKeyCode()) {
        case KeyEvent.KEYCODE_VOLUME_DOWN:
        case KeyEvent.KEYCODE_VOLUME_MUTE:
            stopSound();
            return true;
        default:
            return super.dispatchKeyEvent(event);
        }
    }
    @Override
    public boolean onTouch(View view, MotionEvent event) {
        // Single-finger drag: pulling the top frame down more than a third
        // of the screen snoozes; pulling the bottom frame up dismisses.
        if (event.getPointerCount() > 1) {
            return super.onTouchEvent(event);
        }
        RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) view
                .getLayoutParams();
        int action = MotionEventCompat.getActionMasked(event);
        if (view.getId() == R.id.frame_bottom || view.getId() == R.id.frame_top) {
            switch (action) {
            case MotionEvent.ACTION_DOWN:
                initialPoint = event.getRawY();
                moving = true;
                break;
            case MotionEvent.ACTION_MOVE:
                if (moving) {
                    hinter.interrupt();
                    currentPoint = event.getRawY();
                    DisplayMetrics displayMetrics = new DisplayMetrics();
                    getWindowManager().getDefaultDisplay().getMetrics(
                            displayMetrics);
                    int screenHeight = displayMetrics.heightPixels;
                    if (view.getId() == R.id.frame_top) {
                        int new_margin = (int) (currentPoint - initialPoint);
                        params.topMargin = (new_margin > 0) ? new_margin : 0;
                        if ((new_margin > (screenHeight / 3))
                                && (!isFinishing())) {
                            snoozeAlarm();
                            break;
                        }
                    } else {
                        int new_margin = (int) (initialPoint - currentPoint);
                        params.bottomMargin = (new_margin > 0) ? new_margin : 0;
                        if ((new_margin > (screenHeight / 3))
                                && (!isFinishing())) {
                            cancelAlarm();
                            break;
                        }
                    }
                    view.setLayoutParams(params);
                    view.invalidate();
                }
                break;
            case MotionEvent.ACTION_UP:
                // Released before the threshold: animate the frame back.
                initialPoint = 0;
                TranslateAnimation ta;
                if (view.getId() == R.id.frame_top) {
                    ta = new TranslateAnimation(0, 0, params.topMargin, 0);
                    params.topMargin = 0;
                } else {
                    ta = new TranslateAnimation(0, 0, -params.bottomMargin, 0);
                    params.bottomMargin = 0;
                }
                ta.setDuration(100);
                view.setLayoutParams(params);
                view.startAnimation(ta);
                moving = false;
                startHinter();
                break;
            default:
                return super.onTouchEvent(event);
            }
        }
        return true;
    }
}
| |
package io.quarkus.arc.test.config;
import static java.util.stream.Collectors.toList;
import static java.util.stream.StreamSupport.stream;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.stream.Stream;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.spi.Converter;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import io.quarkus.test.QuarkusUnitTest;
import io.smallrye.config.ConfigMapping;
import io.smallrye.config.ConfigValue;
import io.smallrye.config.SmallRyeConfig;
import io.smallrye.config.WithConverter;
import io.smallrye.config.WithDefault;
import io.smallrye.config.WithName;
import io.smallrye.config.WithParentName;
/**
 * Integration tests for SmallRye Config {@code @ConfigMapping} support in
 * Arc: plain mappings, parent-name groups, primitive/wrapper types,
 * optionals, collections, maps (with and without a base interface),
 * defaults, custom converters, interface hierarchies, and constructor
 * injection with prefix overrides.
 */
public class ConfigMappingTest {
    // NOTE(review): "cloud.server.port=9000" appears twice in the
    // properties below; the duplicate line is harmless but redundant.
    @RegisterExtension
    static final QuarkusUnitTest TEST = new QuarkusUnitTest()
            .withApplicationRoot((jar) -> jar
                    .addAsResource(new StringAsset("config.my.prop=1234\n" +
                            "config.override.my.prop=5678\n" +
                            "group.host=localhost\n" +
                            "group.port=8080\n" +
                            "types.int=9\n" +
                            "types.long=9999999999\n" +
                            "types.float=99.9\n" +
                            "types.double=99.99\n" +
                            "types.char=c\n" +
                            "types.boolean=true\n" +
                            "types.value=1234\n" +
                            "optionals.server.host=localhost\n" +
                            "optionals.server.port=8080\n" +
                            "optionals.optional=optional\n" +
                            "optionals.optional.int=9\n" +
                            "collections.strings=foo,bar\n" +
                            "collections.ints=1,2,3\n" +
                            "maps.server.host=localhost\n" +
                            "maps.server.port=8080\n" +
                            "maps.group.server.host=localhost\n" +
                            "maps.group.server.port=8080\n" +
                            "maps.base.server.host=localhost\n" +
                            "maps.base.server.port=8080\n" +
                            "maps.base.group.server.host=localhost\n" +
                            "maps.base.group.server.port=8080\n" +
                            "converters.foo=notbar\n" +
                            "override.server.host=localhost\n" +
                            "override.server.port=8080\n" +
                            "cloud.server.host=cloud\n" +
                            "cloud.server.port=9000\n" +
                            "cloud.server.port=9000\n" +
                            "hierarchy.foo=bar"),
                            "application.properties"));
    @Inject
    Config config;
    // Simple mapping with an explicit property name.
    @ConfigMapping(prefix = "config")
    public interface MyConfigMapping {
        @WithName("my.prop")
        String myProp();
    }
    @Inject
    MyConfigMapping myConfigMapping;
    // The mapping is reachable both programmatically and via injection.
    @Test
    void configMapping() {
        SmallRyeConfig smallRyeConfig = ((SmallRyeConfig) config);
        MyConfigMapping configMapping = smallRyeConfig.getConfigMapping(MyConfigMapping.class);
        assertNotNull(configMapping);
        assertEquals("1234", configMapping.myProp());
        assertNotNull(myConfigMapping);
        assertEquals("1234", myConfigMapping.myProp());
    }
    // @WithParentName lets several sub-groups share the parent's prefix.
    @ConfigMapping(prefix = "group")
    public interface GroupMapping {
        @WithParentName
        ServerHost host();
        @WithParentName
        ServerPort port();
        interface ServerHost {
            String host();
        }
        interface ServerPort {
            int port();
        }
    }
    @Inject
    GroupMapping groupMapping;
    @Test
    void groups() {
        assertNotNull(groupMapping);
        assertEquals("localhost", groupMapping.host().host());
        assertEquals(8080, groupMapping.port().port());
    }
    // The same property can be read as both primitive and wrapper types.
    @ConfigMapping(prefix = "types")
    public interface SomeTypes {
        @WithName("int")
        int intPrimitive();
        @WithName("int")
        Integer intWrapper();
        @WithName("long")
        long longPrimitive();
        @WithName("long")
        Long longWrapper();
        @WithName("float")
        float floatPrimitive();
        @WithName("float")
        Float floatWrapper();
        @WithName("double")
        double doublePrimitive();
        @WithName("double")
        Double doubleWrapper();
        @WithName("char")
        char charPrimitive();
        @WithName("char")
        Character charWrapper();
        @WithName("boolean")
        boolean booleanPrimitive();
        @WithName("boolean")
        Boolean booleanWrapper();
        @WithName("value")
        ConfigValue configValue();
    }
    @Inject
    SomeTypes types;
    @Test
    void types() {
        assertNotNull(types);
        assertEquals(9, types.intPrimitive());
        assertEquals(9, types.intWrapper());
        assertEquals(9999999999L, types.longPrimitive());
        assertEquals(9999999999L, types.longWrapper());
        assertEquals(99.9f, types.floatPrimitive());
        assertEquals(99.9f, types.floatWrapper());
        assertEquals(99.99, types.doublePrimitive());
        assertEquals(99.99, types.doubleWrapper());
        assertEquals('c', types.charPrimitive());
        assertEquals('c', types.charWrapper());
        assertTrue(types.booleanPrimitive());
        assertTrue(types.booleanWrapper());
        assertEquals("1234", types.configValue().getValue());
    }
    // Optional group, Optional value and OptionalInt are all supported.
    @ConfigMapping(prefix = "optionals")
    public interface Optionals {
        Optional<Server> server();
        Optional<String> optional();
        @WithName("optional.int")
        OptionalInt optionalInt();
        interface Server {
            String host();
            int port();
        }
    }
    @Inject
    Optionals optionals;
    @Test
    void optionals() {
        assertTrue(optionals.server().isPresent());
        assertEquals("localhost", optionals.server().get().host());
        assertEquals(8080, optionals.server().get().port());
        assertTrue(optionals.optional().isPresent());
        assertEquals("optional", optionals.optional().get());
        assertTrue(optionals.optionalInt().isPresent());
        assertEquals(9, optionals.optionalInt().getAsInt());
    }
    // Comma-separated values map onto typed lists.
    @ConfigMapping(prefix = "collections")
    public interface Collections {
        @WithName("strings")
        List<String> listStrings();
        @WithName("ints")
        List<Integer> listInts();
    }
    @Inject
    Collections collections;
    @Test
    void collections() {
        assertEquals(Stream.of("foo", "bar").collect(toList()), collections.listStrings());
        assertEquals(Stream.of(1, 2, 3).collect(toList()), collections.listInts());
    }
    // Maps of plain values and maps of nested groups.
    @ConfigMapping(prefix = "maps")
    public interface Maps {
        Map<String, String> server();
        Map<String, Server> group();
        interface Server {
            String host();
            int port();
        }
    }
    @Inject
    Maps maps;
    @Test
    void maps() {
        assertEquals("localhost", maps.server().get("host"));
        assertEquals(8080, Integer.valueOf(maps.server().get("port")));
        assertEquals("localhost", maps.group().get("server").host());
        assertEquals(8080, maps.group().get("server").port());
    }
    public interface ServerBase {
        Map<String, String> server();
    }
    // Same map support works when the method is inherited and overridden.
    @ConfigMapping(prefix = "maps.base")
    public interface MapsWithBase extends ServerBase {
        @Override
        Map<String, String> server();
        Map<String, Server> group();
        interface Server {
            String host();
            int port();
        }
    }
    @Inject
    MapsWithBase mapsWithBase;
    @Test
    void mapsWithBase() {
        assertEquals("localhost", mapsWithBase.server().get("host"));
        assertEquals(8080, Integer.valueOf(mapsWithBase.server().get("port")));
        assertEquals("localhost", mapsWithBase.group().get("server").host());
        assertEquals(8080, mapsWithBase.group().get("server").port());
    }
    // No "defaults.*" properties exist, so @WithDefault values apply.
    @ConfigMapping(prefix = "defaults")
    public interface Defaults {
        @WithDefault("foo")
        String foo();
        @WithDefault("bar")
        String bar();
    }
    @Inject
    Defaults defaults;
    @Test
    void defaults() {
        assertEquals("foo", defaults.foo());
        assertEquals("bar", defaults.bar());
        // Defaults are resolvable via the Config API but do not show up in
        // the property-name listing.
        assertEquals("foo", config.getValue("defaults.foo", String.class));
        final List<String> propertyNames = stream(config.getPropertyNames().spliterator(), false).collect(toList());
        assertFalse(propertyNames.contains("defaults.foo"));
    }
    // A custom converter replaces the raw value ("notbar") with "bar".
    @ConfigMapping(prefix = "converters")
    public interface Converters {
        @WithConverter(FooBarConverter.class)
        String foo();
        class FooBarConverter implements Converter<String> {
            @Override
            public String convert(final String value) {
                return "bar";
            }
        }
    }
    @Inject
    Converters converters;
    @Test
    void converters() {
        assertEquals("bar", converters.foo());
    }
    public interface Base {
        String foo();
    }
    @ConfigMapping(prefix = "hierarchy")
    public interface ExtendsBase extends Base {
    }
    @Inject
    Base base;
    @Inject
    ExtendsBase extendsBase;
    // Injecting the base and the extending interface yields the same bean.
    @Test
    void hierarchy() {
        assertSame(base, extendsBase);
        assertEquals("bar", extendsBase.foo());
    }
    // The same mapping interface can be injected with different prefixes
    // through constructor parameters.
    @Dependent
    public static class ConstructorInjection {
        private String myProp;
        private String overrideProp;
        @Inject
        public ConstructorInjection(@ConfigMapping(prefix = "config") MyConfigMapping myConfigMapping,
                @ConfigMapping(prefix = "config.override") MyConfigMapping override) {
            this.myProp = myConfigMapping.myProp();
            this.overrideProp = override.myProp();
        }
        public String getMyProp() {
            return myProp;
        }
        public String getOverrideProp() {
            return overrideProp;
        }
    }
    @Inject
    ConstructorInjection constructorInjection;
    @Test
    void constructorInjection() {
        assertEquals("1234", constructorInjection.getMyProp());
        assertEquals("5678", constructorInjection.getOverrideProp());
    }
}
| |
/*
* Copyright (c) 1999, 2004, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
/*
* Licensed Materials - Property of IBM
* RMI-IIOP v1.0
* Copyright IBM Corp. 1998 1999 All Rights Reserved
*
*/
package com.sun.corba.se.impl.javax.rmi;
import java.lang.reflect.Method ;
import javax.rmi.CORBA.Tie;
import javax.rmi.CORBA.Util;
import java.rmi.RemoteException;
import java.rmi.NoSuchObjectException;
import java.rmi.Remote;
import java.util.Properties;
import org.omg.CORBA.ORB;
import org.omg.CORBA.portable.Delegate;
import org.omg.CORBA.SystemException;
import java.rmi.server.UnicastRemoteObject;
import java.rmi.server.RemoteStub;
import java.rmi.server.ExportException;
import java.net.URL;
import com.sun.corba.se.impl.util.JDKBridge;
import com.sun.corba.se.impl.util.Utility;
import com.sun.corba.se.impl.util.RepositoryId;
import com.sun.corba.se.spi.presentation.rmi.StubAdapter;
import java.security.AccessController;
import com.sun.corba.se.impl.orbutil.GetPropertyAction;
/**
* Server implementation objects may either inherit from
* javax.rmi.PortableRemoteObject or they may implement a remote interface
* and then use the exportObject method to register themselves as a server object.
* The toStub method takes a server implementation and returns a stub that
* can be used to access that server object.
* The connect method makes a Remote object ready for remote communication.
* The unexportObject method is used to deregister a server object, allowing it to become
* available for garbage collection.
* The narrow method takes an object reference or abstract interface type and
* attempts to narrow it to conform to
* the given interface. If the operation is successful the result will be an
* object of the specified type, otherwise an exception will be thrown.
*/
public class PortableRemoteObject
        implements javax.rmi.CORBA.PortableRemoteObjectDelegate {

    /**
     * Makes a server object ready to receive remote calls. Note
     * that subclasses of PortableRemoteObject do not need to call this
     * method, as it is called by the constructor.
     * @param obj the server object to export.
     * @exception RemoteException if export fails.
     */
    public void exportObject(Remote obj)
        throws RemoteException {

        if (obj == null) {
            throw new NullPointerException("invalid argument");
        }

        // Has this object already been exported to IIOP?
        if (Util.getTie(obj) != null) {
            // Yes, so this is an error...
            throw new ExportException (obj.getClass().getName() + " already exported");
        }

        // Can we load a Tie?
        Tie theTie = Utility.loadTie(obj);
        if (theTie != null) {
            // Yes, so export it to IIOP...
            Util.registerTarget(theTie,obj);
        } else {
            // No, so export to JRMP. If this is called twice for the
            // same object, it will throw an ExportException...
            UnicastRemoteObject.exportObject(obj);
        }
    }

    /**
     * Returns a stub for the given server object.
     * @param obj the server object for which a stub is required. Must either be a subclass
     * of PortableRemoteObject or have been previously the target of a call to
     * {@link #exportObject}.
     * @return the most derived stub for the object.
     * @exception NoSuchObjectException if a stub cannot be located for the given server object.
     */
    public Remote toStub (Remote obj)
        throws NoSuchObjectException
    {
        Remote result = null;
        if (obj == null) {
            throw new NullPointerException("invalid argument");
        }

        // If the class is already an IIOP stub then return it.
        if (StubAdapter.isStub( obj )) {
            return obj;
        }

        // If the class is already a JRMP stub then return it.
        if (obj instanceof RemoteStub) {
            return obj;
        }

        // Has it been exported to IIOP?
        Tie theTie = Util.getTie(obj);

        if (theTie != null) {
            // Exported to IIOP: load the matching IIOP stub.
            result = Utility.loadStub(theTie,null,null,true);
        } else {
            // Not exported to IIOP; if no Tie can even be loaded for it,
            // assume it is a JRMP object and delegate to the JRMP runtime.
            if (Utility.loadTie(obj) == null) {
                result = java.rmi.server.RemoteObject.toStub(obj);
            }
        }

        if (result == null) {
            throw new NoSuchObjectException("object not exported");
        }

        return result;
    }

    /**
     * Deregisters a server object from the runtime, allowing the object to become
     * available for garbage collection.
     * @param obj the object to unexport.
     * @exception NoSuchObjectException if the remote object is not
     * currently exported.
     */
    public void unexportObject(Remote obj)
        throws NoSuchObjectException {

        if (obj == null) {
            throw new NullPointerException("invalid argument");
        }

        // Stubs (IIOP or JRMP) cannot be unexported — only server objects can.
        if (StubAdapter.isStub(obj) ||
            obj instanceof RemoteStub) {
            throw new NoSuchObjectException(
                "Can only unexport a server object.");
        }

        Tie theTie = Util.getTie(obj);
        if (theTie != null) {
            // Exported to IIOP: deregister through the IIOP runtime.
            Util.unexportObject(obj);
        } else {
            if (Utility.loadTie(obj) == null) {
                // No Tie available, so this must be a JRMP export.
                UnicastRemoteObject.unexportObject(obj,true);
            } else {
                // A Tie exists but the object was never exported.
                throw new NoSuchObjectException("Object not exported.");
            }
        }
    }

    /**
     * Checks to ensure that an object of a remote or abstract interface type
     * can be cast to a desired type.
     * @param narrowFrom the object to check.
     * @param narrowTo the desired type.
     * @return an object which can be cast to the desired type.
     * @throws ClassCastException if narrowFrom cannot be cast to narrowTo.
     */
    public Object narrow ( Object narrowFrom,
        Class narrowTo) throws ClassCastException
    {
        Object result = null;

        if (narrowFrom == null)
            return null;

        if (narrowTo == null)
            throw new NullPointerException("invalid argument");

        try {
            // Already assignable: no narrowing needed.
            if (narrowTo.isAssignableFrom(narrowFrom.getClass()))
                return narrowFrom;

            // Is narrowTo an interface that might be
            // implemented by a servant running on iiop?
            if (narrowTo.isInterface() &&
                narrowTo != java.io.Serializable.class &&
                narrowTo != java.io.Externalizable.class) {

                org.omg.CORBA.Object narrowObj
                    = (org.omg.CORBA.Object) narrowFrom;

                // Create an id from the narrowTo type...
                String id = RepositoryId.createForAnyType(narrowTo);

                // Ask the remote object whether it supports that repository id.
                if (narrowObj._is_a(id)) {
                    return Utility.loadStub(narrowObj,narrowTo);
                } else {
                    throw new ClassCastException( "Object is not of remote type " +
                        narrowTo.getName() ) ;
                }
            } else {
                throw new ClassCastException( "Class " + narrowTo.getName() +
                    " is not a valid remote interface" ) ;
            }
        } catch(Exception error) {
            // Wrap any failure in a ClassCastException, preserving the cause.
            ClassCastException cce = new ClassCastException() ;
            cce.initCause( error ) ;
            throw cce ;
        }
    }

    /**
     * Makes a Remote object ready for remote communication. This normally
     * happens implicitly when the object is sent or received as an argument
     * on a remote method call, but in some circumstances it is useful to
     * perform this action by making an explicit call. See the
     * {@link Stub#connect} method for more information.
     * @param target the object to connect.
     * @param source a previously connected object.
     * @throws RemoteException if <code>source</code> is not connected
     * or if <code>target</code> is already connected to a different ORB than
     * <code>source</code>.
     */
    public void connect (Remote target, Remote source)
        throws RemoteException
    {
        if (target == null || source == null) {
            throw new NullPointerException("invalid argument");
        }

        // First, determine the ORB of the source object. A null orb after this
        // step means the source is (or is treated as) a JRMP object.
        ORB orb = null;
        try {
            if (StubAdapter.isStub( source )) {
                orb = StubAdapter.getORB( source ) ;
            } else {
                // Is this a servant that was exported to iiop?
                Tie tie = Util.getTie(source);
                if (tie == null) {
                    /* loadTie always succeeds for dynamic RMI-IIOP
                    // No, can we get a tie for it? If not,
                    // assume that source is a JRMP object...
                    if (Utility.loadTie(source) != null) {
                        // Yes, so it is an iiop object which
                        // has not been exported...
                        throw new RemoteException(
                            "'source' object not exported");
                    }
                    */
                } else {
                    orb = tie.orb();
                }
            }
        } catch (SystemException e) {
            throw new RemoteException("'source' object not connected", e );
        }

        // Next, determine whether the target is an IIOP object (stub or tied servant).
        boolean targetIsIIOP = false ;
        Tie targetTie = null;
        if (StubAdapter.isStub(target)) {
            targetIsIIOP = true;
        } else {
            targetTie = Util.getTie(target);
            if (targetTie != null) {
                targetIsIIOP = true;
            } else {
                /* loadTie always succeeds for dynamic RMI-IIOP
                if (Utility.loadTie(target) != null) {
                    throw new RemoteException("'target' servant not exported");
                }
                */
            }
        }

        if (!targetIsIIOP) {
            // Yes. Do we have an ORB from the source object?
            // If not, we're done - there is nothing to do to
            // connect a JRMP object. If so, it is an error because
            // the caller mixed JRMP and IIOP...
            if (orb != null) {
                throw new RemoteException(
                    "'source' object exported to IIOP, 'target' is JRMP");
            }
        } else {
            // The target object is IIOP. Make sure we have a
            // valid ORB from the source object...
            if (orb == null) {
                throw new RemoteException(
                    "'source' object is JRMP, 'target' is IIOP");
            }

            // And, finally, connect it up...
            try {
                if (targetTie != null) {
                    // Is the tie already connected?
                    try {
                        ORB existingOrb = targetTie.orb();

                        // Yes. Is it the same orb?
                        if (existingOrb == orb) {
                            // Yes, so nothing to do...
                            return;
                        } else {
                            // No, so this is an error...
                            throw new RemoteException(
                                "'target' object was already connected");
                        }
                    } catch (SystemException e) {}
                    // orb() threw, meaning the tie is not yet connected — do it now...
                    targetTie.orb(orb);
                } else {
                    StubAdapter.connect( target, orb ) ;
                }
            } catch (SystemException e) {
                // The stub or tie was already connected...
                throw new RemoteException(
                    "'target' object was already connected", e );
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.dataformat.bindy.csv;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import org.apache.camel.Exchange;
import org.apache.camel.dataformat.bindy.BindyAbstractDataFormat;
import org.apache.camel.dataformat.bindy.BindyAbstractFactory;
import org.apache.camel.dataformat.bindy.BindyCsvFactory;
import org.apache.camel.dataformat.bindy.FormatFactory;
import org.apache.camel.dataformat.bindy.util.ConverterUtils;
import org.apache.camel.spi.DataFormat;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A <a href="http://camel.apache.org/data-format.html">data format</a> (
* {@link DataFormat}) using Bindy to marshal to and from CSV files
*/
public class BindyCsvDataFormat extends BindyAbstractDataFormat {
    private static final Logger LOG = LoggerFactory.getLogger(BindyCsvDataFormat.class);

    public BindyCsvDataFormat() {
    }

    public BindyCsvDataFormat(Class<?> type) {
        super(type);
    }

    @Override
    public String getDataFormatName() {
        return "bindy-csv";
    }

    /**
     * Marshals the body into CSV written to the given output stream, one record
     * per model row, optionally preceded by a generated header line.
     *
     * @param exchange     the current exchange, used for type conversion
     * @param body         a model object, a Map, or an iterable of either
     * @param outputStream stream the CSV text is written to
     * @throws Exception if binding or writing fails
     */
    @Override
    @SuppressWarnings("unchecked")
    public void marshal(Exchange exchange, Object body, OutputStream outputStream) throws Exception {
        BindyCsvFactory factory = (BindyCsvFactory)getFactory();
        ObjectHelper.notNull(factory, "not instantiated");

        // Record terminator configured on the model (CR, LF or CRLF)
        byte[] bytesCRLF = ConverterUtils.getByteReturn(factory.getCarriageReturn());

        if (factory.getGenerateHeaderColumnNames()) {
            String result = factory.generateHeader();
            byte[] bytes = exchange.getContext().getTypeConverter().convertTo(byte[].class, exchange, result);
            outputStream.write(bytes);
            // Terminate the header line
            outputStream.write(bytesCRLF);
        }

        List<Map<String, Object>> models = new ArrayList<Map<String, Object>>();

        // the body is not a prepared list of map that bindy expects so help a bit here and create one for us
        Iterator<Object> it = ObjectHelper.createIterator(body);
        while (it.hasNext()) {
            Object model = it.next();
            if (model instanceof Map) {
                models.add((Map<String, Object>) model);
            } else {
                // wrap the POJO (and any linked objects) in the map form bindy expects
                String name = model.getClass().getName();
                Map<String, Object> row = new HashMap<String, Object>(1);
                row.put(name, model);
                row.putAll(createLinkedFieldsModel(model));
                models.add(row);
            }
        }

        for (Map<String, Object> model : models) {
            String result = factory.unbind(model);
            byte[] bytes = exchange.getContext().getTypeConverter().convertTo(byte[].class, exchange, result);
            outputStream.write(bytes);
            // Terminate the record
            outputStream.write(bytesCRLF);
        }
    }

    /**
     * Unmarshals CSV text from the input stream into the annotated model classes.
     *
     * @param exchange    the current exchange, used to determine the charset
     * @param inputStream stream containing the CSV text
     * @return the unmarshalled model object(s)
     * @throws IllegalArgumentException if the stream contains no records
     * @throws Exception if binding fails
     */
    @Override
    public Object unmarshal(Exchange exchange, InputStream inputStream) throws Exception {
        BindyCsvFactory factory = (BindyCsvFactory)getFactory();
        ObjectHelper.notNull(factory, "not instantiated");

        // List of Pojos
        List<Map<String, Object>> models = new ArrayList<Map<String, Object>>();

        // Pojos of the model
        Map<String, Object> model;

        InputStreamReader in = new InputStreamReader(inputStream, IOHelper.getCharsetName(exchange));

        // Scanner is used to read big file
        Scanner scanner = new Scanner(in);

        // Retrieve the separator and quote defined to split the record
        String separator = factory.getSeparator();
        String quote = factory.getQuote();
        ObjectHelper.notNull(separator, "The separator has not been defined in the annotation @CsvRecord or not instantiated during initModel.");

        int count = 0;
        try {
            // If the first line of the CSV file contains columns name, then we
            // skip this line
            if (factory.getSkipFirstLine()) {
                // Check if scanner is empty
                if (scanner.hasNextLine()) {
                    scanner.nextLine();
                }
            }

            while (scanner.hasNextLine()) {
                // Read the line
                String line = scanner.nextLine().trim();

                if (ObjectHelper.isEmpty(line)) {
                    // skip if line is empty
                    continue;
                }

                // Increment counter
                count++;

                // Create POJO where CSV data will be stored
                model = factory.factory();

                // Split the CSV record according to the separator defined in
                // annotated class @CsvRecord
                String[] tokens = line.split(separator, factory.getAutospanLine() ? factory.getMaxpos() : -1);
                List<String> result = Arrays.asList(tokens);

                // must unquote tokens before use
                result = unquoteTokens(result, separator, quote);

                if (result.isEmpty()) {
                    throw new java.lang.IllegalArgumentException("No records have been defined in the CSV");
                } else {
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("Size of the record splitted : {}", result.size());
                    }

                    // Bind data from CSV record with model classes
                    factory.bind(result, model, count);

                    // Link objects together
                    factory.link(model);

                    // Add objects graph to the list
                    models.add(model);

                    LOG.debug("Graph of objects created: {}", model);
                }
            }

            // Check whether anything was read at all; an empty stream is an error
            if (models.isEmpty()) {
                throw new java.lang.IllegalArgumentException("No records have been defined in the CSV");
            } else {
                return extractUnmarshalResult(models);
            }
        } finally {
            scanner.close();
            IOHelper.close(in, "in", LOG);
        }
    }

    /**
     * Unquote the tokens, by removing leading and trailing quote chars,
     * as well as handling fixing broken tokens which may have been split
     * by a separator inside a quote.
     */
    private List<String> unquoteTokens(List<String> result, String separator, String quote) {
        // a current quoted token which we assemble from the broken pieces
        // we need to do this as we use the split method on the String class
        // to split the line using regular expression, and it does not handle
        // if the separator char is also inside a quoted token, therefore we need
        // to fix this afterwards
        StringBuilder current = new StringBuilder();
        List<String> answer = new ArrayList<String>();

        for (String s : result) {
            boolean startQuote = false;
            boolean endQuote = false;
            if (s.startsWith(quote)) {
                s = s.substring(1);
                startQuote = true;
            }
            if (s.endsWith(quote)) {
                s = s.substring(0, s.length() - 1);
                endQuote = true;
            }

            // are we in progress of rebuilding a broken token
            boolean currentInProgress = current.length() > 0;

            // situation when field ending with a separator symbol.
            if (currentInProgress && startQuote && s.isEmpty()) {
                // Add separator, append current and reset it
                current.append(separator);
                answer.add(current.toString());
                current.setLength(0);
                continue;
            }

            // if we hit a start token then rebuild a broken token
            if (currentInProgress || startQuote) {
                // append to current if we are in the middle of a start quote
                if (currentInProgress) {
                    // must append separator back as this is a quoted token that was broken
                    // but a separator inside the quotes
                    current.append(separator);
                }
                current.append(s);
            }

            // are we in progress of rebuilding a broken token
            currentInProgress = current.length() > 0;

            if (endQuote) {
                // we hit end quote so append current and reset it
                answer.add(current.toString());
                current.setLength(0);
            } else if (!currentInProgress) {
                // not rebuilding so add directly as is
                answer.add(s);
            }
        }

        // any left over from current?
        if (current.length() > 0) {
            answer.add(current.toString());
            current.setLength(0);
        }

        return answer;
    }

    @Override
    protected BindyAbstractFactory createModelFactory(FormatFactory formatFactory) throws Exception {
        BindyCsvFactory bindyCsvFactory = new BindyCsvFactory(getClassType());
        bindyCsvFactory.setFormatFactory(formatFactory);
        return bindyCsvFactory;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.integrationtests;
import java.util.ArrayList;
import java.util.HashMap;
import org.apache.fineract.integrationtests.common.Utils;
import org.apache.fineract.integrationtests.common.charges.ChargesHelper;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.jayway.restassured.builder.RequestSpecBuilder;
import com.jayway.restassured.builder.ResponseSpecBuilder;
import com.jayway.restassured.http.ContentType;
import com.jayway.restassured.specification.RequestSpecification;
import com.jayway.restassured.specification.ResponseSpecification;
@SuppressWarnings({ "rawtypes" })
public class ChargesTest {
    // Expectation applied to every helper call (setup configures HTTP 200).
    private ResponseSpecification responseSpec;
    // Pre-authenticated JSON request specification shared by all tests.
    private RequestSpecification requestSpec;
@Before
public void setup() {
Utils.initializeRESTAssured();
this.requestSpec = new RequestSpecBuilder().setContentType(ContentType.JSON).build();
this.requestSpec.header("Authorization", "Basic " + Utils.loginIntoServerAndGetBase64EncodedAuthenticationKey());
this.responseSpec = new ResponseSpecBuilder().expectStatusCode(200).build();
}
@Test
public void testChargesForLoans() {
// Retrieving all Charges
ArrayList<HashMap> allChargesData = ChargesHelper.getCharges(this.requestSpec, this.responseSpec);
Assert.assertNotNull(allChargesData);
// Testing Creation, Updation and Deletion of Disbursement Charge
final Integer disbursementChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getLoanDisbursementJSON());
Assert.assertNotNull(disbursementChargeId);
// Updating Charge Amount
HashMap changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, disbursementChargeId,
ChargesHelper.getModifyChargeJSON());
HashMap chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, disbursementChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, disbursementChargeId,
ChargesHelper.getModifyChargeAsPecentageAmountJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, disbursementChargeId);
HashMap chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargePaymentMode");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargePaymentMode"));
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, disbursementChargeId,
ChargesHelper.getModifyChargeAsPecentageLoanAmountWithInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, disbursementChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, disbursementChargeId,
ChargesHelper.getModifyChargeAsPercentageInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, disbursementChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
Integer chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, disbursementChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", disbursementChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Specified due date Charge
final Integer specifiedDueDateChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getLoanSpecifiedDueDateJSON());
Assert.assertNotNull(specifiedDueDateChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, specifiedDueDateChargeId,
ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, specifiedDueDateChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, specifiedDueDateChargeId,
ChargesHelper.getModifyChargeAsPecentageAmountJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, specifiedDueDateChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargePaymentMode");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargePaymentMode"));
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, specifiedDueDateChargeId,
ChargesHelper.getModifyChargeAsPecentageLoanAmountWithInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, specifiedDueDateChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, specifiedDueDateChargeId,
ChargesHelper.getModifyChargeAsPercentageInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, specifiedDueDateChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, specifiedDueDateChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", specifiedDueDateChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Installment Fee Charge
final Integer installmentFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getLoanInstallmentFeeJSON());
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, installmentFeeChargeId,
ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, installmentFeeChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, installmentFeeChargeId,
ChargesHelper.getModifyChargeAsPecentageAmountJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, installmentFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargePaymentMode");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargePaymentMode"));
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, installmentFeeChargeId,
ChargesHelper.getModifyChargeAsPecentageLoanAmountWithInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, installmentFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, installmentFeeChargeId,
ChargesHelper.getModifyChargeAsPercentageInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, installmentFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, installmentFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", installmentFeeChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Overdue Installment Fee
// Charge
final Integer overdueFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getLoanOverdueFeeJSON());
Assert.assertNotNull(overdueFeeChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdueFeeChargeId, ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdueFeeChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdueFeeChargeId,
ChargesHelper.getModifyChargeAsPecentageAmountJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdueFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargePaymentMode");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargePaymentMode"));
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdueFeeChargeId,
ChargesHelper.getModifyChargeAsPecentageLoanAmountWithInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdueFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdueFeeChargeId,
ChargesHelper.getModifyChargeAsPercentageInterestJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdueFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdueFeeChargeId,
ChargesHelper.getModifyChargeFeeFrequencyAsYearsJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdueFeeChargeId);
chargeChangedData = (HashMap) chargeDataAfterChanges.get("feeFrequency");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("feeFrequency"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, overdueFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", overdueFeeChargeId, chargeIdAfterDeletion);
}
@Test
public void testChargesForSavings() {
// Testing Creation, Updation and Deletion of Specified due date Charge
final Integer specifiedDueDateChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsSpecifiedDueDateJSON());
Assert.assertNotNull(specifiedDueDateChargeId);
// Updating Charge Amount
HashMap changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, specifiedDueDateChargeId,
ChargesHelper.getModifyChargeJSON());
HashMap chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, specifiedDueDateChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
Integer chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, specifiedDueDateChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", specifiedDueDateChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Savings Activation Charge
final Integer savingsActivationChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsActivationFeeJSON());
Assert.assertNotNull(savingsActivationChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, savingsActivationChargeId,
ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, savingsActivationChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, savingsActivationChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", savingsActivationChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Charge for Withdrawal Fee
final Integer withdrawalFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsWithdrawalFeeJSON());
Assert.assertNotNull(withdrawalFeeChargeId);
// Updating Charge-Calculation-Type to Withdrawal-Fee
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, withdrawalFeeChargeId,
ChargesHelper.getModifyWithdrawalFeeSavingsChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, withdrawalFeeChargeId);
HashMap chargeChangedData = (HashMap) chargeDataAfterChanges.get("chargeCalculationType");
Assert.assertEquals("Verifying Charge after Modification", chargeChangedData.get("id"), changes.get("chargeCalculationType"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, withdrawalFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", withdrawalFeeChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Charge for Annual Fee
final Integer annualFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsAnnualFeeJSON());
Assert.assertNotNull(annualFeeChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, annualFeeChargeId, ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, annualFeeChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, annualFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", annualFeeChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Charge for Monthly Fee
final Integer monthlyFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsMonthlyFeeJSON());
Assert.assertNotNull(monthlyFeeChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, monthlyFeeChargeId, ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, monthlyFeeChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, monthlyFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", monthlyFeeChargeId, chargeIdAfterDeletion);
// Testing Creation, Updation and Deletion of Charge for Overdraft Fee
final Integer overdraftFeeChargeId = ChargesHelper.createCharges(this.requestSpec, this.responseSpec,
ChargesHelper.getSavingsOverdraftFeeJSON());
Assert.assertNotNull(overdraftFeeChargeId);
// Updating Charge Amount
changes = ChargesHelper.updateCharges(this.requestSpec, this.responseSpec, overdraftFeeChargeId,
ChargesHelper.getModifyChargeJSON());
chargeDataAfterChanges = ChargesHelper.getChargeById(this.requestSpec, this.responseSpec, overdraftFeeChargeId);
Assert.assertEquals("Verifying Charge after Modification", chargeDataAfterChanges.get("amount"), changes.get("amount"));
chargeIdAfterDeletion = ChargesHelper.deleteCharge(this.responseSpec, this.requestSpec, overdraftFeeChargeId);
Assert.assertEquals("Verifying Charge ID after deletion", overdraftFeeChargeId, chargeIdAfterDeletion);
}
}
| |
/*
* Copyright 2010-2020 Alfresco Software, Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.bpmn;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.mxgraph.layout.mxGraphLayout;
import com.mxgraph.model.mxCell;
import com.mxgraph.model.mxGeometry;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.view.mxGraph;
/**
* BPMNLayout
*
*/
public class BPMNLayout extends mxGraphLayout {

    // NEW
    // Back-reference to the auto-layout driver; injected via the setter below.
    // NOTE(review): not read anywhere in this class — presumably used by subclasses
    // or kept for future use; confirm before removing.
    protected BpmnAutoLayout bpmnAutoLayout;

    public void setBpmnAutoLayout(BpmnAutoLayout bpmnAutoLayout) {
        this.bpmnAutoLayout = bpmnAutoLayout;
    }
    // NEW

    /**
     * Specifies the orientation of the layout. Default is true.
     */
    protected boolean horizontal;

    /**
     * Specifies if edge directions should be inverted. Default is false.
     */
    protected boolean invert;

    /**
     * If the parent should be resized to match the width/height of the tree. Default is true.
     */
    protected boolean resizeParent = true;

    /**
     * Specifies if the tree should be moved to the top, left corner if it is inside a top-level layer. Default is true.
     */
    protected boolean moveTree = true;

    /**
     * Specifies if all edge points of traversed edges should be removed. Default is true.
     */
    protected boolean resetEdges = true;

    /**
     * Holds the levelDistance. Default is 40.
     */
    protected int levelDistance = 40;

    /**
     * Holds the nodeDistance. Default is 20.
     */
    protected int nodeDistance = 20;

    /**
     * Creates a horizontal layout for the given graph.
     *
     * @param graph the graph to lay out
     */
    public BPMNLayout(mxGraph graph) {
        this(graph, true);
    }

    /**
     * Creates a layout for the given graph with the given orientation.
     *
     * @param graph the graph to lay out
     * @param horizontal true for a left-to-right layout
     */
    public BPMNLayout(mxGraph graph, boolean horizontal) {
        this(graph, horizontal, false);
    }

    /**
     * Creates a layout for the given graph.
     *
     * @param graph the graph to lay out
     * @param horizontal true for a left-to-right layout
     * @param invert true to traverse edges against their direction
     */
    public BPMNLayout(mxGraph graph, boolean horizontal, boolean invert) {
        super(graph);
        // Bounding-box based placement is disabled; raw geometry is used instead.
        setUseBoundingBox(false);
        this.horizontal = horizontal;
        this.invert = invert;
    }

    public mxGraph getGraph() {
        return (mxGraph) graph;
    }

    /**
     * Returns a boolean indicating if the given <em>mxCell</em> should be ignored as a vertex. This returns true if the cell has no connections.
     *
     * @param vertex
     *            Object that represents the vertex to be tested.
     * @return Returns true if the vertex should be ignored.
     */
    public boolean isVertexIgnored(Object vertex) {
        // NOTE(review): getGeometry(vertex) is dereferenced without a null check;
        // dfs() below checks geometry for null, so a vertex without geometry
        // would throw an NPE here — confirm vertices always carry a geometry.
        return super.isVertexIgnored(vertex) || graph.isSwimlane(vertex) || graph.getModel().getGeometry(vertex).isRelative() || graph.getConnections(vertex).length == 0;
    }

    /**
     * @return the horizontal
     */
    public boolean isHorizontal() {
        return horizontal;
    }

    /**
     * @param horizontal
     *            the horizontal to set
     */
    public void setHorizontal(boolean horizontal) {
        this.horizontal = horizontal;
    }

    /**
     * @return the invert
     */
    public boolean isInvert() {
        return invert;
    }

    /**
     * @param invert
     *            the invert to set
     */
    public void setInvert(boolean invert) {
        this.invert = invert;
    }

    /**
     * @return the resizeParent
     */
    public boolean isResizeParent() {
        return resizeParent;
    }

    /**
     * @param resizeParent
     *            the resizeParent to set
     */
    public void setResizeParent(boolean resizeParent) {
        this.resizeParent = resizeParent;
    }

    /**
     * @return the moveTree
     */
    public boolean isMoveTree() {
        return moveTree;
    }

    /**
     * @param moveTree
     *            the moveTree to set
     */
    public void setMoveTree(boolean moveTree) {
        this.moveTree = moveTree;
    }

    /**
     * @return the resetEdges
     */
    public boolean isResetEdges() {
        return resetEdges;
    }

    /**
     * @param resetEdges
     *            the resetEdges to set
     */
    public void setResetEdges(boolean resetEdges) {
        this.resetEdges = resetEdges;
    }

    /**
     * @return the levelDistance
     */
    public int getLevelDistance() {
        return levelDistance;
    }

    /**
     * @param levelDistance
     *            the levelDistance to set
     */
    public void setLevelDistance(int levelDistance) {
        this.levelDistance = levelDistance;
    }

    /**
     * @return the nodeDistance
     */
    public int getNodeDistance() {
        return nodeDistance;
    }

    /**
     * @param nodeDistance
     *            the nodeDistance to set
     */
    public void setNodeDistance(int nodeDistance) {
        this.nodeDistance = nodeDistance;
    }

    /**
     * Runs the layout: finds each tree root under the given parent, builds a
     * spanning tree via dfs(), computes the compact tree layout, then applies
     * the coordinates and (optionally) resizes the parent swimlane to fit.
     * All model changes for a root happen inside one beginUpdate/endUpdate pair.
     */
    public void execute(Object parent) {
        mxIGraphModel model = graph.getModel();
        List<Object> roots = graph.findTreeRoots(parent, true, invert);
        // if (getGraph().isOrganizationElement(parent)) {
        // roots = asList(graph.getSelectionCells());
        // }
        for (Object root : roots) {
            parent = model.getParent(root);
            // A boundary event is nested one level deeper, so its layout parent
            // is the grandparent cell.
            if (isBoundaryEvent(root)) {
                parent = model.getParent(parent);
            }
            model.beginUpdate();
            try {
                TreeNode node = dfs(root, parent, null);
                if (node != null) {
                    layout(node);
                    // Default origin: one grid cell from the parent's corner.
                    double x0 = graph.getGridSize();
                    double y0 = x0;
                    if (!moveTree || parent == graph.getDefaultParent() || parent == graph.getCurrentRoot()) {
                        mxGeometry g = model.getGeometry(root);
                        if (g.isRelative()) {
                            g = model.getGeometry(model.getParent(root));
                        }
                        if (g != null) {
                            x0 = g.getX();
                            y0 = g.getY();
                        }
                    }
                    mxRectangle bounds = null;
                    if (horizontal) {
                        bounds = horizontalLayout(node, x0, y0, null);
                    } else {
                        bounds = verticalLayout(node, null, x0, y0, null);
                    }
                    if (bounds != null) {
                        // Shift the whole tree into positive coordinate space.
                        double dx = 0;
                        double dy = 0;
                        if (bounds.getX() < 0) {
                            dx = Math.abs(x0 - bounds.getX());
                        }
                        if (bounds.getY() < 0) {
                            dy = Math.abs(y0 - bounds.getY());
                        }
                        if (parent != null) {
                            // Account for the swimlane title area.
                            mxRectangle size = graph.getStartSize(parent);
                            dx += size.getWidth();
                            dy += size.getHeight();
                            // Resize parent swimlane
                            if (resizeParent && !graph.isCellCollapsed(parent)) {
                                mxGeometry g = model.getGeometry(parent);
                                if (g != null) {
                                    double width = bounds.getWidth() + size.getWidth() - bounds.getX() + 2 * x0;
                                    double height = bounds.getHeight() + size.getHeight() - bounds.getY() + 2 * y0;
                                    g = (mxGeometry) g.clone();
                                    // If the parent is already large enough, center
                                    // the tree inside it instead of shrinking it.
                                    if (g.getWidth() > width) {
                                        dx += (g.getWidth() - width) / 2;
                                    } else {
                                        g.setWidth(width);
                                    }
                                    if (g.getHeight() > height) {
                                        if (horizontal) {
                                            dy += (g.getHeight() - height) / 2;
                                        }
                                    } else {
                                        g.setHeight(height);
                                    }
                                    model.setGeometry(parent, g);
                                }
                            }
                        }
                        if (model.getParent(node.cell) != graph.getCurrentRoot() && model.getParent(node.cell) != graph.getDefaultParent()) {
                            moveNode(node, dx, dy);
                        }
                    }
                }
            } finally {
                model.endUpdate();
            }
        }
    }

    /**
     * Returns true if the cell is a boundary event, detected purely by the
     * "boundary-event-" id prefix assigned when the cell was created.
     */
    protected boolean isBoundaryEvent(Object obj) {
        if (obj instanceof mxCell) {
            mxCell cell = (mxCell) obj;
            return cell.getId().startsWith("boundary-event-");
        }
        return false;
    }

    /**
     * Moves the specified node and all of its children by the given amount.
     */
    protected void moveNode(TreeNode node, double dx, double dy) {
        node.x += dx;
        node.y += dy;
        apply(node, null);
        TreeNode child = node.child;
        while (child != null) {
            moveNode(child, dx, dy);
            child = child.next;
        }
    }

    /**
     * Does a depth first search starting at the specified cell. Makes sure the specified swimlane is never left by the algorithm.
     */
    protected TreeNode dfs(Object cell, Object parent, Set<Object> visited) {
        if (visited == null) {
            visited = new HashSet<Object>();
        }
        TreeNode node = null;
        mxIGraphModel model = graph.getModel();
        // Boundary events bypass the isVertexIgnored filter so they still get a node.
        if (cell != null && !visited.contains(cell) && (!isVertexIgnored(cell) || isBoundaryEvent(cell))) {
            visited.add(cell);
            node = createNode(cell);
            TreeNode prev = null;
            Object[] out = graph.getEdges(cell, parent, invert, !invert, false);
            for (int i = 0; i < out.length; i++) {
                Object edge = out[i];
                if (!isEdgeIgnored(edge)) {
                    // Resets the points on the traversed edge
                    if (resetEdges) {
                        setEdgePoints(edge, null);
                    }
                    // Checks if terminal in same swimlane
                    Object target = graph.getView().getVisibleTerminal(edge, invert);
                    TreeNode tmp = dfs(target, parent, visited);
                    // Link children into a singly-linked sibling list (child/next).
                    if (tmp != null && model.getGeometry(target) != null) {
                        if (prev == null) {
                            node.child = tmp;
                        } else {
                            prev.next = tmp;
                        }
                        prev = tmp;
                    }
                }
            }
        }
        return node;
    }

    /**
     * Starts the actual compact tree layout algorithm at the given node.
     */
    protected void layout(TreeNode node) {
        if (node != null) {
            // Post-order: children are laid out before the parent is attached.
            TreeNode child = node.child;
            while (child != null) {
                layout(child);
                child = child.next;
            }
            if (node.child != null) {
                attachParent(node, join(node));
            } else {
                layoutLeaf(node);
            }
        }
    }

    /**
     * Converts the relative offsets computed by layout() into absolute
     * coordinates for a left-to-right tree, accumulating the overall bounds.
     */
    protected mxRectangle horizontalLayout(TreeNode node, double x0, double y0, mxRectangle bounds) {
        node.x += x0 + node.offsetX;
        node.y += y0 + node.offsetY;
        bounds = apply(node, bounds);
        TreeNode child = node.child;
        if (child != null) {
            bounds = horizontalLayout(child, node.x, node.y, bounds);
            double siblingOffset = node.y + child.offsetY;
            TreeNode s = child.next;
            while (s != null) {
                bounds = horizontalLayout(s, node.x + child.offsetX, siblingOffset, bounds);
                siblingOffset += s.offsetY;
                s = s.next;
            }
        }
        return bounds;
    }

    /**
     * Same as horizontalLayout but with the offset axes swapped for a
     * top-to-bottom tree (offsetY feeds x, offsetX feeds y).
     */
    protected mxRectangle verticalLayout(TreeNode node, Object parent, double x0, double y0, mxRectangle bounds) {
        node.x += x0 + node.offsetY;
        node.y += y0 + node.offsetX;
        bounds = apply(node, bounds);
        TreeNode child = node.child;
        if (child != null) {
            bounds = verticalLayout(child, node, node.x, node.y, bounds);
            double siblingOffset = node.x + child.offsetY;
            TreeNode s = child.next;
            while (s != null) {
                bounds = verticalLayout(s, node, siblingOffset, node.y + child.offsetX, bounds);
                siblingOffset += s.offsetY;
                s = s.next;
            }
        }
        return bounds;
    }

    /**
     * Positions a parent relative to its (already joined) children and extends
     * the parent's contour around them. Part of the contour-based compact tree
     * layout; boundary events get an extra vertical offset for their child.
     */
    protected void attachParent(TreeNode node, double height) {
        double x = nodeDistance + levelDistance;
        double y2 = (height - node.width) / 2 - nodeDistance;
        double y1 = y2 + node.width + 2 * nodeDistance - height;
        node.child.offsetX = x + node.height;
        if (isBoundaryEvent(node.cell)) {
            node.child.offsetY = y1 + node.child.width;
        } else {
            node.child.offsetY = y1;
        }
        node.contour.upperHead = createLine(node.height, 0, createLine(x, y1, node.contour.upperHead));
        node.contour.lowerHead = createLine(node.height, 0, createLine(x, y2, node.contour.lowerHead));
    }

    /**
     * Initializes the contour of a childless node: a rectangle of the node's
     * own size padded by nodeDistance on each side.
     */
    protected void layoutLeaf(TreeNode node) {
        double dist = 2 * nodeDistance;
        node.contour.upperTail = createLine(node.height + dist, 0, null);
        node.contour.upperHead = node.contour.upperTail;
        node.contour.lowerTail = createLine(0, -node.width - dist, null);
        node.contour.lowerHead = createLine(node.height + dist, 0, node.contour.lowerTail);
    }

    /**
     * Merges the contours of all children of the given node, assigning each
     * sibling its vertical offset, and returns the combined height of the
     * child block (used by attachParent).
     */
    protected double join(TreeNode node) {
        double dist = 2 * nodeDistance;
        TreeNode child = node.child;
        node.contour = child.contour;
        double h = child.width + dist;
        double sum = h;
        child = child.next;
        while (child != null) {
            double d = merge(node.contour, child.contour);
            child.offsetY = d + h;
            child.offsetX = 0;
            h = child.width + dist;
            sum += d + h;
            child = child.next;
        }
        return sum;
    }

    /**
     * Merges two adjacent sibling contours, walking p1's lower and p2's upper
     * polylines in parallel and accumulating the minimum separation needed so
     * the subtrees do not overlap. Returns that separation distance.
     */
    protected double merge(Polygon p1, Polygon p2) {
        double x = 0;
        double y = 0;
        double total = 0;
        Polyline upper = p1.lowerHead;
        Polyline lower = p2.upperHead;
        while (lower != null && upper != null) {
            double d = offset(x, y, lower.dx, lower.dy, upper.dx, upper.dy);
            y += d;
            total += d;
            // Advance along whichever polyline ends first.
            if (x + lower.dx <= upper.dx) {
                x += lower.dx;
                y += lower.dy;
                lower = lower.next;
            } else {
                x -= upper.dx;
                y -= upper.dy;
                upper = upper.next;
            }
        }
        // Splice the leftover segments of the longer contour onto p1.
        if (lower != null) {
            Polyline b = bridge(p1.upperTail, 0, 0, lower, x, y);
            p1.upperTail = (b.next != null) ? p2.upperTail : b;
            p1.lowerTail = p2.lowerTail;
        } else {
            Polyline b = bridge(p2.lowerTail, x, y, upper, 0, 0);
            if (b.next == null) {
                p1.lowerTail = b;
            }
        }
        p1.lowerHead = p2.lowerHead;
        return total;
    }

    /**
     * Computes the vertical distance needed between two contour segments so
     * that segment (a1,a2) starting at (p1,p2) clears segment (b1,b2).
     * Returns 0 when the segments do not overlap horizontally.
     */
    protected double offset(double p1, double p2, double a1, double a2, double b1, double b2) {
        double d = 0;
        if (b1 <= p1 || p1 + a1 <= 0) {
            return 0;
        }
        double t = b1 * a2 - a1 * b2;
        if (t > 0) {
            if (p1 < 0) {
                double s = p1 * a2;
                d = s / a1 - p2;
            } else if (p1 > 0) {
                double s = p1 * b2;
                d = s / b1 - p2;
            } else {
                d = -p2;
            }
        } else if (b1 < p1 + a1) {
            double s = (b1 - p1) * a2;
            d = b2 - (p2 + s / a1);
        } else if (b1 > p1 + a1) {
            double s = (a1 + p1) * b2;
            d = s / b1 - (p2 + a2);
        } else {
            d = b2 - (p2 + a2);
        }
        if (d > 0) {
            return d;
        }
        return 0;
    }

    /**
     * Connects the end of one contour polyline to the remainder of another
     * after a merge, inserting the interpolated connecting segments.
     */
    protected Polyline bridge(Polyline line1, double x1, double y1, Polyline line2, double x2, double y2) {
        double dx = x2 + line2.dx - x1;
        double dy = 0;
        double s = 0;
        if (line2.dx == 0) {
            dy = line2.dy;
        } else {
            s = dx * line2.dy;
            dy = s / line2.dx;
        }
        Polyline r = createLine(dx, dy, line2.next);
        line1.next = createLine(0, y2 + line2.dy - dy - y1, r);
        return r;
    }

    /**
     * Creates the layout node for a cell. Width and height are swapped for a
     * horizontal layout so the algorithm can always work in one orientation.
     */
    protected TreeNode createNode(Object cell) {
        TreeNode node = new TreeNode(cell);
        mxRectangle geo = getVertexBounds(cell);
        if (geo != null) {
            if (horizontal) {
                node.width = geo.getHeight();
                node.height = geo.getWidth();
            } else {
                node.width = geo.getWidth();
                node.height = geo.getHeight();
            }
        }
        return node;
    }

    /**
     * Writes the node's computed position into the model and folds its
     * geometry into the running bounds rectangle.
     */
    protected mxRectangle apply(TreeNode node, mxRectangle bounds) {
        mxRectangle g = graph.getModel().getGeometry(node.cell);
        if (node.cell != null && g != null) {
            if (isVertexMovable(node.cell)) {
                g = setVertexLocation(node.cell, node.x, node.y);
            }
            if (bounds == null) {
                bounds = new mxRectangle(g.getX(), g.getY(), g.getWidth(), g.getHeight());
            } else {
                // NOTE(review): the width/height arguments here are max
                // coordinates, not extents — the resulting rectangle's
                // width/height are inflated. The same expression appears to be
                // relied on by execute(); confirm before "fixing".
                bounds = new mxRectangle(Math.min(bounds.getX(), g.getX()), Math.min(bounds.getY(), g.getY()), Math.max(bounds.getX() + bounds.getWidth(), g.getX() + g.getWidth()), Math.max(bounds.getY()
                        + bounds.getHeight(), g.getY() + g.getHeight()));
            }
        }
        return bounds;
    }

    /**
     * Factory for contour polyline segments.
     */
    protected Polyline createLine(double dx, double dy, Polyline next) {
        return new Polyline(dx, dy, next);
    }

    /**
     * A node of the layout tree: wraps a graph cell with its computed
     * position, size, per-child offsets and contour.
     */
    protected static class TreeNode {
        /**
         * The wrapped graph cell.
         */
        protected Object cell;

        /**
         * Position, size (already orientation-adjusted) and the offsets
         * relative to the parent/previous sibling.
         */
        protected double x, y, width, height, offsetX, offsetY;

        /**
         * First child and next sibling links.
         */
        protected TreeNode child, next; // parent, sibling

        /**
         * The subtree's outline used for sibling separation.
         */
        protected Polygon contour = new Polygon();

        /**
         * @param cell the graph cell this node represents
         */
        public TreeNode(Object cell) {
            this.cell = cell;
        }
    }

    /**
     * A subtree contour: upper and lower polyline chains.
     */
    protected static class Polygon {
        /**
         * Heads and tails of the two contour chains.
         */
        protected Polyline lowerHead, lowerTail, upperHead, upperTail;
    }

    /**
     * One segment of a contour chain (a vector plus the next segment).
     */
    protected static class Polyline {
        /**
         * Segment vector.
         */
        protected double dx, dy;

        /**
         * Next segment in the chain.
         */
        protected Polyline next;

        /**
         * @param dx segment x extent
         * @param dy segment y extent
         * @param next following segment, or null
         */
        protected Polyline(double dx, double dy, Polyline next) {
            this.dx = dx;
            this.dy = dy;
            this.next = next;
        }
    }
}
| |
//
// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).
// All rights reserved.
//
package openadk.generator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
/**
 * CodeGenerator is the base class for code generation classes in AdkGen. It handles the
* reusable elements of code generation of the ADK classes, while subclasses implement the
* language-specific elements
*
*
* @author Andy Elmhorst
* @version 1.0
*/
public abstract class CodeGenerator extends Generator {
/**
 * Creates a code generator.
 *
 * @param srcDir the directory containing the definition source files
 * @param destDir the directory to which generated code is written
 */
protected CodeGenerator( String srcDir, String destDir )
{
	super( srcDir, destDir );
}
/**
 * Writes a single ElementDef const line to the specified SDO Library class
 * @param out the writer for the generated library class
 * @param commentName text used in the generated comment for the constant
 * @param constName name of the generated ElementDef constant
 */
protected abstract void writeElementDefConst(PrintWriter out, String commentName, String constName );

// ------------------------------------------------------------------------
// Language-specific primitives. Each concrete generator subclass (e.g. the
// Java and C# generators — see the fLanguage "cs" check in writeDTDLoad and
// the JavaGenerator check in generateObject) implements these to emit the
// corresponding construct in its target language's syntax.
// ------------------------------------------------------------------------
protected abstract void writeToStringOverride(PrintWriter out, FieldDef def);
protected abstract void writeAliasDefinition(PrintWriter out, String elementDefName, String flags, String aliasVer, String renderAs, String sequence );
protected abstract void writeEnumClass(PrintWriter out, EnumDef enumDef);
protected abstract void writeSDOLibraryHeader(PrintWriter out, String className);
protected abstract void writeClassHeader( PrintWriter writer, ObjectDef def);
protected abstract void writeClassComment( PrintWriter writer, ObjectDef def);
protected abstract void writeClassCtor( PrintWriter writer, ObjectDef def );
protected abstract void writeAbstractMethods( PrintWriter writer, ObjectDef def);
protected abstract void writeSIFDTDClass(DB database, String dir, DB packageDB) throws IOException;
// Emits one ElementDef creation statement inside the DTD load() method.
protected abstract void writeElementDefCreationLine(
	PrintWriter out, String dtdSymbol, String parentDtdSymbol,
	boolean useElementDefAlias,String fieldName, String renderName, int sequenceNumber,
	FieldType classType, String localPackage, SIFVersion earliestVersion,
	SIFVersion latestVersion, String flags, String typeConverter );
protected abstract String getTypeConverterName( FieldType fieldType );
protected abstract String getADKSimpleType( ADKDataType dataType );
protected abstract void writeComplexField( PrintWriter writer, ObjectDef objectDef, FieldDef field, ObjectDef def) throws GeneratorException;
protected abstract void writeSimpleField( PrintWriter writer, ObjectDef objectDef, FieldDef field );
protected abstract void writeDTDHeader( PrintWriter out, DB db, String pkg );
protected abstract void writeDtdLoad( PrintWriter out );
protected abstract void writeDTDTableUpdates( PrintWriter out, DB db, String pkg );
protected abstract void writeEnumClassComment( PrintWriter out, EnumDef enumDef );
protected abstract void writeEnumHeader( PrintWriter out, EnumDef enumDef );
protected abstract void writeDTDClassComment( PrintWriter out, DB db ) throws IOException;
protected abstract void writeDTDAbstractMethods( PrintWriter out );
// Escapes/translates an identifier into a legal symbol for the target language.
protected abstract String symbol(String s);
protected abstract String getSuperClassSeperatorAndName( ObjectDef superClass );
/**
 * Generates source classes for all objects in the given databases, then the
 * per-package DTD metadata classes.
 *
 * @param dbs the definition databases to generate from
 */
public void generate(DB[] dbs) throws IOException, GeneratorException, MergeException {
	super.sortAndGenerateObjects( dbs );
	// Now generate the DTD classes
	System.out.println("\r\nGenerating DTD classes...");
	generateDTDClasses(dbs);
}
// Root element names of the SIF infrastructure messages.
// NOTE(review): not referenced within this visible portion of the class;
// presumably consumed by subclasses — confirm before removing.
protected static String[] InfraMessages = {
	"SIF_Ack",
	"SIF_Event",
	"SIF_Provide",
	"SIF_Register",
	"SIF_Request",
	"SIF_Response",
	"SIF_Subscribe",
	"SIF_SystemControl",
	"SIF_Unprovide",
	"SIF_Unregister",
	"SIF_Unsubscribe",
	"SIF_ZoneStatus",
	"SIF_Provision"
};
/**
 * Generates the SIFDTD class, which extends openadk.library.DTD
 * to provide information about the core SIF data type definition. In
 * addition, generates a derived class for each version of SIF, where
 * the name is the version (e.g."SIF10r1").
 *
 * @param databases An array of DB objects sorted by SIF version number
 */
protected void generateDTDClasses(DB[] databases) throws IOException, GeneratorException {
	String dir = fDir+"openadk/library";
	// Use the latest database for retrieving package names
	DB packageDB = databases[ databases.length - 1 ];
	PrintWriter out;
	// The core SIFDTD class is generated from the earliest database.
	writeSIFDTDClass(databases[0], dir, packageDB);
	//
	// Create SDOLibrary.java classes...
	//
	for( String sdoName : packageDB.getDefinitionFileKeysSet() )
	{
		// Generate the package-specific SDOLibrary classes...
		// e.g. package "student" produces class "StudentDTD".
		String packageDir = fDir+"openadk/library/" + sdoName;
		String packageSDOName = sdoName.substring( 0 , 1 ).toUpperCase() + sdoName.substring( 1 ) + "DTD";
		File md = new File(packageDir);
		md.mkdirs();
		String packageFn = packageDir + File.separator + packageSDOName + getFileExtension() ;
		System.out.println("- Generating: "+ packageFn );
		out = null;
		try
		{
			out = new PrintWriter(new OutputStreamWriter(new FileOutputStream( packageFn ), "utf-8"),true );
			writeDTDHeader(out, packageDB ,sdoName);
			writeDTDClassComment(out, packageDB );
			writeSDOLibraryHeader(out, packageSDOName );
			// Write out the ElementDef statics for each object.
			// NOTE(review): objects come from databases[0] (earliest version)
			// while package names come from the latest — assumed intentional.
			ObjectDef[] o = databases[0].getObjects();
			// Sort them
			Arrays.sort( o, new Comparator<ObjectDef>()
			{
				public int compare( ObjectDef o1, ObjectDef o2 ) {
					return o1.getDTDSymbol().compareTo( o2.getDTDSymbol() );
				}
			}
			);
			// Write the ElementDef constants to the DTD class
			writeDTDConstants( out, sdoName, o );
			// Write the load() method on the DTD class
			writeDTDLoad(out, sdoName, packageDB, o);
			out.println();
			// Write the addElementMappings method on the DTD class
			writeDTDTableUpdates( out, databases[0], sdoName );
			writeClassFooter(out);
		}
		finally
		{
			// Best-effort close; the file was already flushed by the
			// auto-flushing PrintWriter.
			if( out != null ) {
				try {
					out.close();
				} catch( Exception ex ) {
				}
			}
		}
	}
	// Generate the Enumeration classes
	System.out.println("\r\nGenerating enum classes...");
	writeEnumClasses(dir, databases[0].getEnums() );
}
/**
 * Emits one enum source file per EnumDef, placed under the enum's package
 * sub-directory of the given base directory.
 *
 * @param dir the base output directory
 * @param enums the enumeration definitions to generate
 * @throws IOException if a file cannot be written
 */
private void writeEnumClasses(String dir, EnumDef[] enums) throws IOException {
	for( EnumDef enumDef : enums )
	{
		// Ensure the package sub-directory exists before writing into it.
		File packageDir = new File( dir + File.separator + enumDef.fPackage );
		packageDir.mkdirs();
		String fileName = dir + File.separator + enumDef.fPackage + File.separator + enumDef.getName() + getFileExtension();
		System.out.println("- Generating: "+fileName);
		PrintWriter out = null;
		try
		{
			out = new PrintWriter(new OutputStreamWriter(new FileOutputStream( fileName ), "utf-8"),true );
			writeEnumHeader(out, enumDef);
			writeEnumClassComment(out, enumDef);
			writeEnumClass(out, enumDef);
		}
		finally
		{
			// Best-effort close; nothing useful can be done on failure here.
			if( out != null ) {
				try {
					out.close();
				} catch( Exception ignored ) {
				}
			}
		}
	}
}
/**
 * Write the load() method on the specified DTD class.
 *
 * @param out the writer for the generated DTD class
 * @param sdoName the SDO package whose objects are emitted; objects from
 *        other packages are skipped
 * @param db the database used to resolve referenced object types
 * @param o the objects to emit (already sorted by DTD symbol)
 */
private void writeDTDLoad(PrintWriter out, String sdoName, DB db, ObjectDef[] o) {
	// Write out the initialize() method
	out.println();
	writeDtdLoad( out );
	out.println("\t{");
	out.println("\t\t// Objects defined by this SDO Library...");
	out.println();
	Vector<ObjectDef> sdoObjects = new Vector<ObjectDef>();
	for( int k = 0; k < o.length; k++ )
	{
		if( ( o[k].getFlags() & ObjectDef.FLAG_NO_SIFDTD ) == 0 )
		{
			// Only objects belonging to this SDO package are emitted here.
			// (Removed leftover debug trap for SIF_LogEvent that printed
			// "************1" to stdout.)
			if( !o[k].getLocalPackage().equals( sdoName ) ) {
				continue;
			}
			sdoObjects.addElement(o[k]);
			String typeConverter = null;
			FieldType ft = o[k].getValueType();
			if( ft != null ){
				typeConverter = getTypeConverterName( ft );
			}
			writeElementDefCreationLine(
				out, o[k].getDTDSymbol(), null, false,
				o[k].getName(), o[k].getRenderAs(),
				o[k].getSequenceOverride() == -1 ? 0 : o[k].getSequenceOverride(),
				o[k].getValueType(), o[k].getLocalPackage(),
				o[k].getEarliestVersion(),
				o[k].getLatestVersion(),
				( o[k].isTopic() ? "ElementDefImpl.FD_OBJECT":"" ),
				typeConverter );
			// Write out any aliases
			Map<String, List<SIFVersion>> aliases = o[k].getAliases();
			if( aliases != null )
			{
				for( Map.Entry<String, List<SIFVersion>> entry : aliases.entrySet() )
				{
					SIFVersion aliasVer = entry.getValue().get( 0 );
					// Render the version as e.g. "20" or "15r1".
					StringBuffer buf = new StringBuffer();
					buf.append( aliasVer.getMajor() );
					buf.append( aliasVer.getMinor() );
					if( aliasVer.getRevision() > 0 ){
						buf.append( "r" );
						buf.append( aliasVer.getRevision() );
					}
					//LibraryDTD.TRANSACTIONLIST_TRANSACTION.defineVersionInfo(SIFVersion.SIF20, "Transaction", 1, (ElementDefImpl.FD_REPEATABLE)); // (SIF 20 alias)
					writeAliasDefinition(out, o[k].getDTDSymbol(), "0", buf.toString(), entry.getKey(), "0" );
				}
			}
		}
	}
	out.println();
	// Write out a static ElementDef defining each SIF element
	for( int k = 0; k < sdoObjects.size(); k++ )
	{
		ObjectDef oo = (ObjectDef)sdoObjects.elementAt(k);
		out.println();
		out.println("\t\t// <" + ( oo.getRenderAs() == null ? oo.getName() : oo.getRenderAs() ) + "> fields (" + oo.getAllFields().length + " entries)" );
		FieldDef[] fields = oo.getDTDFields();
		for( int f = 0; f < fields.length; f++ )
		{
			if( ( fields[f].getFlags() & ObjectDef.FLAG_NO_SIFDTD ) == 0 )
			{
				String flags = getFieldFlags( fields[f].getFlags() );
				String fieldClassType = fields[f].getFieldType().getClassType();
				ObjectDef fod = db.getObject( fieldClassType );
				boolean useElementDefAlias = fields[f].isComplex() && !fieldClassType.equals(fields[f].getName());
				// if( db.getVersion().compareTo( fields[f].getEarliestVersion() ) >= 0 )
				{
					// Resolve the owning package and, for simple fields or
					// value-typed complex fields, the type converter to use.
					String packageName = (fod == null ? oo.getLocalPackage() : fod.getLocalPackage());
					String typeConverterName = null;
					if( !fields[f].isComplex() ){
						typeConverterName = getTypeConverterName( fields[f].getFieldType() );
					} else if ( fod != null ) {
						FieldType objSimpleType = fod.getValueType();
						if( objSimpleType != null ){
							typeConverterName = getTypeConverterName( objSimpleType );
						}
					}
					writeElementDefCreationLine(
						out, fields[f].getElementDefConst( this ), oo.getDTDSymbol(), useElementDefAlias,
						fields[f].getName(), fields[f].getElementDefExpression(), fields[f].getSequence(), fields[f].getFieldType(),
						packageName, fields[f].getEarliestVersion(), fields[f].getLatestVersion(), flags, typeConverterName );
					// Write out any aliases
					Set<Alias> aliases = fields[f].getAliases();
					if( aliases != null )
					{
						for(Alias alias : aliases)
						{
							SIFVersion aliasVer = alias.getVersion();
							StringBuffer buf = new StringBuffer();
							buf.append( aliasVer.getMajor() );
							buf.append( aliasVer.getMinor() );
							if( aliasVer.getRevision() > 0 ){
								buf.append( "r" );
								buf.append( aliasVer.getRevision() );
							}
							writeAliasDefinition(
								out, fields[f].getElementDefConst( this ),
								getFieldFlags( alias.getFlags() ), buf.toString(),
								alias.getElementDefExpression(), String.valueOf( alias.getSequence() ) );
						}
					}
				}
			}
		}
		if( oo.isTopic() ) {
			//latestVersion may not be the latest one supported. ie, if you want to do a build of the old dm, or if .net is behind/ahead of java in terms of sif version support
			SIFVersion latestVersion = SIFVersion.getEarliest(1);
			for ( DB dbInList : Main.self.fDBs.values() ) {
				if ( dbInList != null && dbInList.fVersion.compareTo(latestVersion) > 0 )
					latestVersion = dbInList.fVersion;
			}
			// Every topic object implicitly carries SIF_ExtendedElements and
			// SIF_Metadata children at fixed sequence positions 127/128.
			writeElementDefCreationLine( out, oo.getDTDSymbol() + "_SIF_EXTENDEDELEMENTS", oo.getDTDSymbol(), false, "SIF_ExtendedElements", null,
				127, null, (Main.self.fLanguage.equals("cs")? "global" : "common"), SIFVersion.SIF15r1, latestVersion, "0", null );
			writeElementDefCreationLine( out, oo.getDTDSymbol() + "_SIF_METADATA", oo.getDTDSymbol(), false, "SIF_Metadata", null,
				128, null, "datamodel", SIFVersion.SIF20, latestVersion, "0", null );
		}
	}
	out.println("\t}");
}
/**
 * Write the ElementDef constants for the specified library class: one per
 * SIF Data Object in this package, followed by one per field of each object
 * (plus the implicit SIF_ExtendedElements/SIF_Metadata constants for topics).
 *
 * @param out the writer for the generated DTD class
 * @param sdoName the SDO package whose objects are emitted; objects from
 *        other packages are skipped
 * @param o the objects to emit constants for
 */
private void writeDTDConstants(PrintWriter out, String sdoName, ObjectDef[] o) {
	// Write out the public constants defined by this package.
	// (Removed the dead local "dtdItemCount", which was incremented but
	// never read.)
	for( int k = 0; k < o.length; k++ )
	{
		if( !o[k].getLocalPackage().equals( sdoName ) ){
			continue;
		}
		if( ( o[k].getFlags() & ObjectDef.FLAG_NO_SIFDTD ) == 0 )
		{
			writeElementDefConst(out, "Defines the <" + o[k].getName()+"> SIF Data Object", o[k].getDTDSymbol() );
		}
	}
	out.println();
	for( int k = 0; k < o.length; k++ )
	{
		if( !o[k].getLocalPackage().equals( sdoName ) ){
			continue;
		}
		out.println();
		out.println("\t// Field elements of "+o[k].getDTDSymbol() + " (" + o[k].getAllFields().length + " fields)" );
		FieldDef[] fields = o[k].getAllFields();
		for( int f = 0; f < fields.length; f++ )
		{
			// Describe the field as either an XML attribute or a child element.
			String comment;
			if( ( fields[f].getFlags() & FieldDef.FLAG_ATTRIBUTE ) != 0 ) {
				comment = fields[f].getName() + " attribute";
			} else {
				comment = "<" + fields[f].getName() + "> element";
			}
			if( ( fields[f].getFlags() & FieldDef.FLAG_NO_SIFDTD ) == 0 )
			{
				writeElementDefConst( out, "Defines the " + comment + " as a child of <" + o[k].getName() + ">", fields[f].getDTDSymbol() );
			}
		}
		if( o[k].isTopic() ) {
			writeElementDefConst( out,
				"SIF 1.5 and later: Defines the built-in SIF_ExtendedElements element common to all SIF Data Objects",
				o[k].getDTDSymbol() + "_SIF_EXTENDEDELEMENTS" );
			writeElementDefConst( out,
				"SIF 2.0 and later: Defines the built-in SIF_Metadata element common to all SIF Data Objects",
				o[k].getDTDSymbol() + "_SIF_METADATA" );
		}
	}
}
/**
 * Translates a FieldDef flag bitmask into the '|'-joined ElementDefImpl
 * flag expression used in generated source.
 *
 * @param flags the FieldDef flag bits
 * @return the flag expression, or an empty string if no flags apply
 */
private String getFieldFlags( int flags ) {
	// Collect each applicable flag expression in emission order...
	List<String> parts = new ArrayList<String>();
	if( ( flags & FieldDef.FLAG_ATTRIBUTE ) != 0 ) {
		parts.add("ElementDefImpl.FD_ATTRIBUTE");
	} else if( ( flags & FieldDef.FLAG_COMPLEX ) == 0 ) {
		// Not an attribute and not complex: a plain field.
		parts.add("ElementDefImpl.FD_FIELD");
	}
	if( ( flags & FieldDef.FLAG_DO_NOT_ENCODE ) != 0 ) {
		parts.add("ElementDefImpl.FD_DO_NOT_ENCODE");
	}
	if( ( flags & FieldDef.FLAG_COLLAPSED ) != 0 ) {
		parts.add("ElementDefImpl.FD_COLLAPSE");
	}
	if( ( flags & FieldDef.FLAG_DEPRECATED ) != 0 ) {
		parts.add("ElementDefImpl.FD_DEPRECATED");
	}
	if( ( flags & FieldDef.FLAG_REPEATABLE ) != 0 ) {
		parts.add("ElementDefImpl.FD_REPEATABLE");
	}
	// ...then join them with '|'.
	StringBuilder joined = new StringBuilder();
	for( String part : parts ) {
		if( joined.length() > 0 ) {
			joined.append('|');
		}
		joined.append(part);
	}
	return joined.toString();
}
/**
 * Prefixes the given word with the English indefinite article ("a"/"an"),
 * optionally capitalized and optionally wrapped in &lt;code&gt; tags.
 * Words starting with a vowel or 'h' take "an".
 *
 * @param str the word to prefix (must be non-empty)
 * @param upperCase true to capitalize the article
 * @param code true to wrap the word in &lt;code&gt; tags
 * @return the article plus the (possibly wrapped) word
 */
protected String an(String str, boolean upperCase, boolean code) {
	char first = Character.toLowerCase(str.charAt(0));
	// 'h' is grouped with the vowels here (e.g. "an hour").
	boolean useAn = "aeiouh".indexOf(first) >= 0;
	String article = useAn ? (upperCase ? "An" : "an") : (upperCase ? "A" : "a");
	StringBuilder result = new StringBuilder(article);
	result.append(' ');
	if( code ) {
		result.append("<code>");
	}
	result.append(str);
	if( code ) {
		result.append("</code>");
	}
	return result.toString();
}
/**
 * Returns a simple English plural of the given word: "es" is appended when
 * the word already ends in "s", otherwise "s".
 *
 * @param str the word to pluralize
 * @return the pluralized word
 */
protected String plural(String str) {
	return str.endsWith("s") ? str + "es" : str + "s";
}
/**
 * Appends the object's "extras" file, if any, to the generated output by
 * delegating to {@link #writeExtras(PrintWriter, String)}.
 *
 * @param out the writer for the generated class
 * @param o the object whose extras file should be appended
 */
protected void writeExtras(PrintWriter out, ObjectDef o) throws IOException {
	writeExtras( out, o.getExtrasFile() );
}
/**
 * Copies the contents of an "extras" file verbatim into the generated
 * output, wrapped in BEGIN/END marker comments, echoing each line to the
 * console as it is copied. Does nothing when fn is null.
 *
 * @param out the writer for the generated class
 * @param fn path of the extras file, or null for none
 * @throws IOException if the extras file cannot be read
 */
protected void writeExtras(PrintWriter out, String fn) throws IOException {
	if( fn != null )
	{
		BufferedReader in = null;
		try
		{
			in = new BufferedReader( new FileReader(fn) );
			out.println("\r\n// BEGIN EXTRA METHODS ("+fn.replace('\\', '/')+")\r\n");
			// Read to end-of-file. The previous do/while used in.ready() as
			// its loop condition, which emitted a literal "null" line for an
			// empty file and is not a reliable EOF test; readLine() == null is.
			String s;
			while( ( s = in.readLine() ) != null )
			{
				out.println(s);
				System.out.println(s);
			}
			out.println("\r\n// END EXTRA METHODS\r\n");
		}
		finally
		{
			if( in != null ) {
				try {
					in.close();
				} catch( IOException ignored ) { }
			}
		}
	}
}
/**
 * Writes the standard auto-generated file banner (do-not-modify notice and
 * copyright block) to the given writer.
 *
 * @param out the writer for the generated class
 */
protected void writeFileHeader(PrintWriter out) {
	// Each entry is one banner line; empty strings become blank lines.
	String[] banner = {
		"// THIS FILE WAS AUTO-GENERATED BY ADKGEN -- DO NOT MODIFY!",
		"",
		"//",
		"// Copyright (c)1998-2011 Pearson Education, Inc. or its affiliate(s).",
		"// All rights reserved.",
		"//",
		""
	};
	for( String line : banner ) {
		out.println(line);
	}
}
/**
 * Generates a language-specific class file for an ObjectDef
*/
protected void generateObject(ObjectDef o) throws IOException, GeneratorException {
if ( this instanceof JavaGenerator && "SIF_ExtendedElement".equals( o.fName ) ) {
return;
}
String dir = fDir+"openadk.library."+ toProperCase( o.getLocalPackage() );
dir = dir.replace('.',File.separatorChar);
String fn = dir+File.separator+o.getName()+ getFileExtension();
System.out.println("- Generating: "+fn);
PrintWriter out = null;
try
{
File md = new File(dir);
md.mkdirs();
out = new PrintWriter( new OutputStreamWriter(new FileOutputStream( fn ), "utf-8"),true );
writeClassHeader(out,o);
writeClassCtor(out,o);
writeAbstractMethods(out,o);
// If the object has a simple content model, write out an accessor
// for the element content. This will consist of a single property
// name "Value" that gets and sets the text value of the element.
if( o.getName().equals( "StringElement" ) ){
System.out.println( "Break" );
}
FieldDef value = o.getValueDef( this );
if( value != null ){
writeSimpleField( out, o, value );
}
// Now write out getter/setter methods for each FieldDef. If the
// FieldDef is a complex object, multiple getters/setters may be
// written. If the FieldDef is a simple string-type field, only
// one set of getter/setter is written.
//
FieldDef[] fields = o.getAllFields();
ADKElementType elementType = o.getElementType( fDB, this );
for( int i = 0; i < fields.length; i++ )
{
if( fields[i].isComplex() )
{
ObjectDef ref = null;
String classType = fields[i].getFieldType().getClassType();
ref = fDB.getObject( classType );
if( ref == null ) {
throw new GeneratorException(
fields[i].getName()+
" references object type \"" +
classType +
"\", but that object type is not defined" );
} else {
writeComplexField(out,o,fields[i],ref);
}
}
else{
writeSimpleField(out,o,fields[i]);
}
}
// Special Case: If the element isEmpty and has only one attribute,
// override the toString() method to return that attribute's
// value (e.g. <StatePr Code="UT"/> falls into this category.)
//
if( o.isEmpty() )
{
FieldDef[] req = o.getMandatoryFields( this );
if( req != null && req.length == 1 )
{
FieldDef def = req[0];
writeToStringOverride(out, def);
}
}
try
{
writeExtras(out,o);
}
catch( Exception e )
{
throw new GeneratorException("Could not copy extras file for object "+o.getName()+": " + e );
}
writeClassFooter(out);
}
finally
{
if( out != null ) {
try {
out.close();
} catch( Exception e ) {
}
}
}
}
/**
 * Terminates the generated class body by emitting the final closing brace.
 *
 * @param out the writer receiving generated source
 */
protected void writeClassFooter(PrintWriter out) {
    out.print("}");
    out.println();
}
/**
* @param m
* @return Returns an array of strings. The first element is the data type that should be passed in
* to the constructor. The second element is the argument name to use. The third element is a block of code that creates the actual datatype to set
* in the parent object.
*/
/* Unused Code
protected String[] getConstructorParts( FieldDef m )
{
String[] returnVal = null;
ObjectDef paramType = fDB.getObject(m.getName());
if( paramType != null && paramType.getFields().length == 1 && !paramType.hasValueText() )
{
FieldDef[] mandatoryFields = paramType.getMandatoryFields();
if( mandatoryFields.length == 1 && mandatoryFields[0].getEnum() == null )
{
returnVal = getConstructorParts( mandatoryFields[0] );
if( returnVal == null )
{
returnVal = new String[3];
returnVal[0] = mandatoryFields[0].getClassType();
returnVal[1] = toArgument( mandatoryFields[0].getName() );
if( mandatoryFields[0].getClassType().equals("String"))
{
returnVal[2] = returnVal[1];
}
else
{
returnVal[2] = " new " + m.getClassType()+"( " + returnVal[1] + " ) " ;
}
}
else
{
returnVal[2] = " new " + m.getClassType()+"( " + returnVal[2] + " ) " ;
}
}
return returnVal;
}
return null;
}
protected abstract String toArgument( String src );
*/
/**
 * Determines whether {@code m} can be collapsed into a single simple
 * constructor argument, returning the class type of that argument, or
 * {@code null} when no single-argument representation exists.
 *
 * @param m the field to inspect
 * @return the argument's class type, or null
 */
protected String toArgument( FieldDef m )
{
    ObjectDef paramType = fDB.getObject(m.getName());
    if( paramType == null ) {
        return null;
    }
    // Only single-field objects without element text content qualify.
    if( paramType.getAllFields().length != 1 || paramType.getValueDef( this ) != null ) {
        return null;
    }
    FieldDef[] mandatoryFields = paramType.getMandatoryFields( this );
    if( mandatoryFields.length != 1 ) {
        return null;
    }
    FieldDef only = mandatoryFields[0];
    // Accept the field itself, or a field that cannot be reduced any further.
    if( only == m || toArgument(only) == null ) {
        return only.getFieldType().getClassType();
    }
    return null;
}
/**
 * Converts {@code s} to the casing convention used by the target language
 * for package/namespace name fragments (implemented per generator; used
 * when building the output directory in generateObject).
 */
protected abstract String toProperCase(String s);
/**
 * Returns the file extension for generated source files (presumably
 * including the leading dot, e.g. ".java" -- confirm in subclasses).
 */
protected abstract String getFileExtension();
/**
 * Emits one DTD-table entry for every tag/alias combination defined by
 * {@code obj}, rejecting duplicate tag definitions.
 *
 * @param out the writer receiving the generated table entries
 * @param db the metadata database used to resolve collapsed field types
 * @param obj the object definition whose tags are written
 * @param alreadyAdded tags emitted so far across all objects; updated in
 *        place and used to detect duplicates
 * @throws RuntimeException if a tag combination is already defined
 */
protected void writeDTDTableUpdatesForObject(
    PrintWriter out,
    DB db,
    ObjectDef obj,
    HashSet<String>alreadyAdded ) {
    String objName = obj.getName();
    // TreeSet keeps the tags sorted so the generated output is deterministic.
    SortedSet<TagItem> tags = new TreeSet<TagItem>();
    addAliases( db, obj, objName, tags, obj.getLatestVersion(), null );
    for( TagItem tagItem : tags ){
        if( alreadyAdded.contains( tagItem.fTag ) ){
            throw new RuntimeException( tagItem.fTag + " is already defined." );
        }
        alreadyAdded.add( tagItem.fTag );
        writeSingleDTDTableUpdate( out, tagItem.fTag, tagItem.fElementDefConst, tagItem.fComment );
    }
}
/**
 * Recursively collects the tag/element-def-constant pairs for every field
 * of {@code obj} into {@code addTo}, including version-specific aliases
 * and the fields of "collapsed" container elements (which render as direct
 * children of the parent element).
 *
 * @param db the metadata database used to resolve collapsed field types
 * @param obj the object whose fields are scanned
 * @param parentObjName tag-name prefix (the outermost object's name)
 * @param addTo receives the collected TagItems
 * @param maxVersion only aliases at or below this version are included
 * @param comment optional comment attached to non-alias entries; may be null
 */
private void addAliases( DB db, ObjectDef obj, String parentObjName, Set<TagItem> addTo, SIFVersion maxVersion, String comment )
{
    for( FieldDef field : obj.getAllFields() ){
        // Skip fields excluded from the SIFDTD table and text-value pseudo-fields.
        if( (field.getFlags() & FieldDef.FLAG_NO_SIFDTD ) != 0 ||
            (field.getFlags() & FieldDef.FLAG_TEXT_VALUE ) != 0 ){
            continue;
        }
        String elementDefConst = field.getElementDefConst( this );
        String renderAs = field.getRenderAs();
        if( renderAs == null ){
            renderAs = field.getName();
        }
        addTo.add( new TagItem( parentObjName + "_" + renderAs, elementDefConst, comment ) );
        // Collapsed fields contribute their children under the parent's name.
        if( ( field.getFlags() & FieldDef.FLAG_COLLAPSED ) != 0 ){
            ObjectDef collapsedContainer = db.getObject( field.getFieldType().getClassType() );
            addAliases( db, collapsedContainer, parentObjName, addTo, field.getEarliestVersion(), "Collapsed in " + field.getEarliestVersion() );
        }
        Set<Alias> aliases = field.getAliases();
        if( aliases != null ){
            for( Alias alias : aliases )
            {
                SIFVersion version = alias.getVersion();
                // Only include aliases visible at or below the requested version.
                if( version.compareTo( maxVersion ) <= 0 ){
                    if( ( alias.getFlags() & FieldDef.FLAG_COLLAPSED ) != 0 ){
                        ObjectDef collapsedContainer = db.getObject( field.getFieldType().getClassType() );
                        addAliases( db, collapsedContainer, parentObjName, addTo, version, "Collapsed in " + version );
                    } else {
                        TagItem item = new TagItem( parentObjName + "_" + alias.getTag(), elementDefConst, version.toString() + " alias" );
                        addTo.add( item );
                    }
                }
            }
        }
    }
}
/**
 * Writes a single DTD-table entry mapping {@code tagCombination} to the
 * ElementDef constant {@code dtdConstName}; implemented per target language.
 *
 * @param out the writer receiving the generated entry
 * @param tagCombination the "Parent_Tag" combination being registered
 * @param dtdConstName the ElementDef constant name for the tag
 * @param addComment optional comment to append to the entry; may be null
 *        (callers pass null for non-alias entries)
 */
protected abstract void writeSingleDTDTableUpdate(PrintWriter out, String tagCombination, String dtdConstName, String addComment);
/**
 * An immutable (tag, elementDefConst, comment) triple. Ordering, equality
 * and hashing are all based solely on the tag string, so compareTo is
 * consistent with equals and instances behave correctly in sorted sets.
 */
private static class TagItem implements Comparable<TagItem>
{
    // Final fields: TagItem is a value object and never mutated after creation.
    private final String fTag;
    private final String fComment;
    private final String fElementDefConst;

    public TagItem( String tag, String elementDefConst, String comment )
    {
        fTag = tag;
        fComment = comment;
        fElementDefConst = elementDefConst;
    }

    /** Orders items alphabetically by tag. */
    public int compareTo( TagItem tag2 ){
        return fTag.compareTo( tag2.fTag );
    }

    @Override
    public boolean equals(Object arg0) {
        // instanceof is false for null, so no separate null check is needed.
        if( !(arg0 instanceof TagItem) ){
            return false;
        }
        return fTag.equals(((TagItem)arg0).fTag);
    }

    @Override
    public int hashCode() {
        return fTag.hashCode();
    }
}
}
| |
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.jscomp.deps.ModuleLoader;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfoBuilder;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Rewrites a ES6 module into a form that can be safely concatenated.
 * Note that we treat a file as an ES6 module if it has at least one import or
 * export statement.
 *
 * @author moz@google.com (Michael Zhou)
 */
public final class ProcessEs6Modules extends AbstractPostOrderCallback {
  // Synthetic local used when a default export has no name of its own
  // (e.g. "export default someExpression;").
  private static final String DEFAULT_EXPORT_NAME = "$jscompDefaultExport";

  static final DiagnosticType LHS_OF_GOOG_REQUIRE_MUST_BE_CONST =
      DiagnosticType.error(
          "JSC_LHS_OF_GOOG_REQUIRE_MUST_BE_CONST",
          "The left side of a goog.require() must use ''const'' (not ''let'' or ''var'')");

  static final DiagnosticType NAMESPACE_IMPORT_CANNOT_USE_STAR =
      DiagnosticType.error(
          "JSC_NAMESPACE_IMPORT_CANNOT_USE_STAR",
          "Namespace imports ('goog:some.Namespace') cannot use import * as. "
              + "Did you mean to import {0} from ''{1}'';?");

  private final Compiler compiler;

  // Number of SCRIPT nodes visited; visitScript() enforces that each
  // instance processes exactly one script.
  private int scriptNodeCount = 0;

  /**
   * Maps exported names to their names in current module.
   */
  private Map<String, NameNodePair> exportMap = new LinkedHashMap<>();

  /**
   * Maps symbol names to a pair of (moduleName, originalName). The original
   * name is the name of the symbol exported by the module. This is required
   * because we want to be able to update the original property on the module
   * object. Eg: "import {foo as f} from 'm'" maps 'f' to the pair ('m', 'foo').
   * In the entry for "import * as ns", the originalName will be the empty string.
   */
  private Map<String, ModuleOriginalNamePair> importMap = new HashMap<>();

  // Names of exported ES6 classes; their module-object assignments receive a
  // @const annotation in visitScript().
  private Set<String> classes = new HashSet<>();

  // Names of exported typedefs; exported as annotated property declarations
  // rather than assignments in visitScript().
  private Set<String> typedefs = new HashSet<>();

  // Module names for which a goog.require() has already been emitted, so
  // repeated imports of the same module produce only one require call.
  private Set<String> alreadyRequired = new HashSet<>();

  // Set once an import or export statement is seen (or forceRewrite is on).
  private boolean isEs6Module;

  private boolean forceRewrite;

  // Insertion cursor so successive goog.require() calls keep source order.
  private Node googRequireInsertSpot;

  /**
   * Creates a new ProcessEs6Modules instance which can be used to rewrite
   * ES6 modules to a concatenable form.
   */
  public ProcessEs6Modules(Compiler compiler) {
    this.compiler = compiler;
  }

  /**
   * If a file contains an ES6 "import" or "export" statement, or the forceRewrite
   * option is true, rewrite the source as a module.
   */
  public void processFile(Node root, boolean forceRewrite) {
    FindGoogProvideOrGoogModule finder = new FindGoogProvideOrGoogModule();
    NodeTraversal.traverseEs6(compiler, root, finder);
    // Files already using goog.provide/goog.module are left untouched.
    if (finder.isFound()) {
      return;
    }
    this.forceRewrite = forceRewrite;
    isEs6Module = forceRewrite;
    NodeTraversal.traverseEs6(compiler, root, this);
  }

  /**
   * Avoid processing if we find the appearance of goog.provide or goog.module.
   *
   * <p>TODO(moz): Let ES6, CommonJS and goog.provide live happily together.
   */
  static class FindGoogProvideOrGoogModule extends NodeTraversal.AbstractPreOrderCallback {

    private boolean found;

    boolean isFound() {
      return found;
    }

    @Override
    public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent) {
      if (found) {
        return false;
      }
      // Shallow traversal, since we don't need to inspect within functions or expressions.
      if (parent == null
          || NodeUtil.isControlStructure(parent)
          || NodeUtil.isStatementBlock(parent)) {
        if (n.isExprResult()) {
          Node maybeGetProp = n.getFirstFirstChild();
          if (maybeGetProp != null
              && (maybeGetProp.matchesQualifiedName("goog.provide")
                  || maybeGetProp.matchesQualifiedName("goog.module"))) {
            found = true;
            return false;
          }
        }
        return true;
      }
      return false;
    }
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    if (n.isImport()) {
      isEs6Module = true;
      visitImport(t, n, parent);
    } else if (n.isExport()) {
      isEs6Module = true;
      visitExport(t, n, parent);
    } else if (n.isScript()) {
      scriptNodeCount++;
      visitScript(t, n);
    }
  }

  /**
   * Records every name bound by an import statement into {@link #importMap},
   * emits a goog.require() for the imported module, and removes the import
   * statement from the tree.
   */
  private void visitImport(NodeTraversal t, Node importDecl, Node parent) {
    String moduleName;
    String importName = importDecl.getLastChild().getString();
    boolean isNamespaceImport = importName.startsWith("goog:");
    if (isNamespaceImport) {
      // Allow importing Closure namespace objects (e.g. from goog.provide or goog.module) as
      //   import ... from 'goog:my.ns.Object'.
      // These are rewritten to plain namespace object accesses.
      moduleName = importName.substring("goog:".length());
    } else {
      ModuleLoader.ModulePath modulePath =
          t.getInput()
              .getPath()
              .resolveJsModule(
                  importName,
                  importDecl.getSourceFileName(),
                  importDecl.getLineno(),
                  importDecl.getCharno());
      if (modulePath == null) {
        // The module loader issues an error
        // Fall back to assuming the module is a file path
        modulePath = t.getInput().getPath().resolveModuleAsPath(importName);
      }
      moduleName = modulePath.toModuleName();
    }
    for (Node child : importDecl.children()) {
      if (child.isEmpty() || child.isString()) {
        continue;
      } else if (child.isName()) { // import a from "mod"
        // Namespace imports' default export is the namespace itself.
        String name = isNamespaceImport ? "" : "default";
        importMap.put(child.getString(), new ModuleOriginalNamePair(moduleName, name));
      } else if (child.isImportSpecs()) {
        for (Node grandChild : child.children()) {
          String origName = grandChild.getFirstChild().getString();
          if (grandChild.hasTwoChildren()) { // import {a as foo} from "mod"
            importMap.put(
                grandChild.getLastChild().getString(),
                new ModuleOriginalNamePair(moduleName, origName));
          } else { // import {a} from "mod"
            importMap.put(
                origName,
                new ModuleOriginalNamePair(moduleName, origName));
          }
        }
      } else {
        // import * as ns from "mod"
        Preconditions.checkState(
            child.isImportStar(), "Expected an IMPORT_STAR node, but was: %s", child);
        // Namespace imports cannot be imported "as *".
        if (isNamespaceImport) {
          compiler.report(t.makeError(importDecl, NAMESPACE_IMPORT_CANNOT_USE_STAR,
              child.getString(), moduleName));
        }
        importMap.put(
            child.getString(),
            new ModuleOriginalNamePair(moduleName, ""));
      }
    }
    Node script = NodeUtil.getEnclosingScript(parent);
    // Emit goog.require call for the module.
    if (alreadyRequired.add(moduleName)) {
      Node require = IR.exprResult(
          IR.call(NodeUtil.newQName(compiler, "goog.require"), IR.string(moduleName)));
      require.useSourceInfoIfMissingFromForTree(importDecl);
      // googRequireInsertSpot is null for the first require -- presumably
      // addChildAfter(n, null) prepends; confirm against Node's contract.
      script.addChildAfter(require, googRequireInsertSpot);
      googRequireInsertSpot = require;
      t.getInput().addRequire(moduleName);
    }
    parent.removeChild(importDecl);
    compiler.reportCodeChange();
  }

  /**
   * Records exported names into {@link #exportMap} and rewrites or removes
   * the export statement; the module-object assignments are emitted later
   * by {@link #visitScript}.
   */
  private void visitExport(NodeTraversal t, Node export, Node parent) {
    if (export.getBooleanProp(Node.EXPORT_DEFAULT)) {
      // export default

      // If the thing being exported is a class or function that has a name,
      // extract it from the export statement, so that it can be referenced
      // from within the module.
      //
      //   export default class X {} -> class X {}; ... moduleName.default = X;
      //   export default function X() {} -> function X() {}; ... moduleName.default = X;
      //
      // Otherwise, create a local variable for it and export that.
      //
      //   export default 'someExpression'
      //     ->
      //   var $jscompDefaultExport = 'someExpression';
      //   ...
      //   moduleName.default = $jscompDefaultExport;
      Node child = export.getFirstChild();
      String name = null;

      if (child.isFunction() || child.isClass()) {
        name = NodeUtil.getName(child);
      }

      if (name != null) {
        Node decl = child.cloneTree();
        decl.setJSDocInfo(child.getJSDocInfo());
        parent.replaceChild(export, decl);
        exportMap.put("default", new NameNodePair(name, child));
      } else {
        Node var = IR.var(IR.name(DEFAULT_EXPORT_NAME), export.removeFirstChild());
        // Move the JSDoc from the exported expression onto the new var.
        var.setJSDocInfo(child.getJSDocInfo());
        child.setJSDocInfo(null);
        var.useSourceInfoIfMissingFromForTree(export);
        parent.replaceChild(export, var);
        exportMap.put("default", new NameNodePair(DEFAULT_EXPORT_NAME, child));
      }
    } else if (export.getBooleanProp(Node.EXPORT_ALL_FROM)) {
      //   export * from 'moduleIdentifier';
      compiler.report(JSError.make(export, Es6ToEs3Converter.CANNOT_CONVERT_YET,
          "Wildcard export"));
    } else if (export.hasTwoChildren()) {
      //   export {x, y as z} from 'moduleIdentifier';
      Node moduleIdentifier = export.getLastChild();
      // Synthesize an import of the referenced module so that the require is
      // emitted and the names resolve, then re-export them below.
      Node importNode = IR.importNode(IR.empty(), IR.empty(), moduleIdentifier.cloneNode());
      importNode.useSourceInfoFrom(export);
      parent.addChildBefore(importNode, export);
      visit(t, importNode, parent);

      ModuleLoader.ModulePath path =
          t.getInput()
              .getPath()
              .resolveJsModule(
                  moduleIdentifier.getString(),
                  export.getSourceFileName(),
                  export.getLineno(),
                  export.getCharno());
      if (path == null) {
        path = t.getInput().getPath().resolveModuleAsPath(moduleIdentifier.getString());
      }
      String moduleName = path.toModuleName();

      for (Node exportSpec : export.getFirstChild().children()) {
        String nameFromOtherModule = exportSpec.getFirstChild().getString();
        String exportedName = exportSpec.getLastChild().getString();
        exportMap.put(exportedName,
            new NameNodePair(moduleName + "." + nameFromOtherModule, exportSpec));
      }
      parent.removeChild(export);
    } else {
      if (export.getFirstChild().getToken() == Token.EXPORT_SPECS) {
        //   export {Foo};
        for (Node exportSpec : export.getFirstChild().children()) {
          Node origName = exportSpec.getFirstChild();
          exportMap.put(
              exportSpec.hasTwoChildren()
                  ? exportSpec.getLastChild().getString()
                  : origName.getString(),
              new NameNodePair(origName.getString(), exportSpec));
        }
        parent.removeChild(export);
      } else {
        //   export var Foo;
        //   export function Foo() {}
        // etc.
        Node declaration = export.getFirstChild();
        Node first = declaration.getFirstChild();
        for (Node maybeName = first; maybeName != null; maybeName = maybeName.getNext()) {
          if (!maybeName.isName()) {
            break;
          }
          // Break out on "B" in "class A extends B"
          if (declaration.isClass() && maybeName != first) {
            break;
          }
          String name = maybeName.getString();
          Var v = t.getScope().getVar(name);
          if (v == null || v.isGlobal()) {
            exportMap.put(name, new NameNodePair(name, maybeName));
          }

          // If the declaration declares a new type, create annotations for
          // the type checker.
          // TODO(moz): Currently we only record ES6 classes and typedefs,
          // need to handle other kinds of type declarations too.
          if (declaration.isClass()) {
            classes.add(name);
          }
          if (declaration.getJSDocInfo() != null && declaration.getJSDocInfo().hasTypedefType()) {
            typedefs.add(name);
          }
        }

        // Unwrap the export: keep only the declaration itself.
        parent.replaceChild(export, declaration.detach());
      }
      compiler.reportCodeChange();
    }
  }

  /**
   * Finalizes the rewrite for the enclosing script: emits the module-object
   * property assignments for every recorded export, renames module-global
   * vars, adds the goog.provide call, and attaches a fileoverview JSDoc.
   */
  private void visitScript(NodeTraversal t, Node script) {
    if (!isEs6Module) {
      return;
    }

    ClosureRewriteModule.checkAndSetStrictModeDirective(t, script);

    Preconditions.checkArgument(scriptNodeCount == 1,
        "ProcessEs6Modules supports only one invocation per "
        + "CompilerInput / script node");

    // rewriteRequires is here (rather than being part of the main visit()
    // method, because we only want to rewrite the requires if this is an
    // ES6 module.
    rewriteRequires(script);

    String moduleName = t.getInput().getPath().toModuleName();

    for (Map.Entry<String, NameNodePair> entry : exportMap.entrySet()) {
      String exportedName = entry.getKey();
      String withSuffix = entry.getValue().name;
      Node nodeForSourceInfo = entry.getValue().nodeForSourceInfo;
      Node getProp = IR.getprop(IR.name(moduleName), IR.string(exportedName));

      if (typedefs.contains(exportedName)) {
        // /** @typedef {foo} */
        // moduleName.foo;
        JSDocInfoBuilder builder = new JSDocInfoBuilder(true);
        JSTypeExpression typeExpr = new JSTypeExpression(
            IR.string(exportedName), script.getSourceFileName());
        builder.recordTypedef(typeExpr);
        JSDocInfo info = builder.build();
        getProp.setJSDocInfo(info);
        Node exprResult = IR.exprResult(getProp)
            .useSourceInfoIfMissingFromForTree(nodeForSourceInfo);
        script.addChildToBack(exprResult);
      } else {
        // moduleName.foo = foo;
        // with a @const annotation if needed.
        Node assign = IR.assign(
            getProp,
            NodeUtil.newQName(compiler, withSuffix));
        Node exprResult = IR.exprResult(assign)
            .useSourceInfoIfMissingFromForTree(nodeForSourceInfo);
        if (classes.contains(exportedName)) {
          JSDocInfoBuilder builder = new JSDocInfoBuilder(true);
          builder.recordConstancy();
          JSDocInfo info = builder.build();
          assign.setJSDocInfo(info);
        }
        script.addChildToBack(exprResult);
      }
    }

    // Rename vars to not conflict in global scope.
    NodeTraversal.traverseEs6(compiler, script, new RenameGlobalVars(moduleName));

    if (!exportMap.isEmpty() || forceRewrite) {
      // Add goog.provide call.
      Node googProvide = IR.exprResult(
          IR.call(NodeUtil.newQName(compiler, "goog.provide"),
              IR.string(moduleName)));
      script.addChildToFront(googProvide.useSourceInfoIfMissingFromForTree(script));
      t.getInput().addProvide(moduleName);
    }

    JSDocInfoBuilder jsDocInfo = script.getJSDocInfo() == null
        ? new JSDocInfoBuilder(false)
        : JSDocInfoBuilder.copyFrom(script.getJSDocInfo());
    if (!jsDocInfo.isPopulatedWithFileOverview()) {
      jsDocInfo.recordFileOverview("");
    }
    // Don't check provides and requires, since most of them are auto-generated.
    jsDocInfo.recordSuppressions(ImmutableSet.of("missingProvide", "missingRequire"));
    script.setJSDocInfo(jsDocInfo.build());

    exportMap.clear();
    compiler.reportCodeChange();
  }

  /**
   * Rewrites "const x = goog.require('ns')" declarations in this module so
   * the require becomes a free-standing statement and the binding reads
   * from the provided namespace instead.
   */
  private void rewriteRequires(Node script) {
    NodeTraversal.traverseEs6(
        compiler,
        script,
        new NodeTraversal.AbstractShallowCallback() {
          @Override
          public void visit(NodeTraversal t, Node n, Node parent) {
            if (n.isCall()
                && n.getFirstChild().matchesQualifiedName("goog.require")
                && NodeUtil.isNameDeclaration(parent.getParent())) {
              visitRequire(n, parent);
            }
          }

          /**
           * Rewrites
           *   const foo = goog.require('bar.foo');
           * to
           *   goog.require('bar.foo');
           *   const foo = bar.foo;
           */
          private void visitRequire(Node requireCall, Node parent) {
            String namespace = requireCall.getLastChild().getString();
            if (!parent.getParent().isConst()) {
              compiler.report(JSError.make(parent.getParent(), LHS_OF_GOOG_REQUIRE_MUST_BE_CONST));
            }

            // If the LHS is a destructuring pattern with the "shorthand" syntax,
            // desugar it because otherwise the renaming will not be done correctly.
            //   const {x} = goog.require('y')
            // becomes
            //   const {x: x} = goog.require('y');
            if (parent.isObjectPattern()) {
              for (Node key = parent.getFirstChild(); key != null; key = key.getNext()) {
                if (!key.hasChildren()) {
                  key.addChildToBack(IR.name(key.getString()).useSourceInfoFrom(key));
                }
              }
            }

            Node replacement = NodeUtil.newQName(compiler, namespace).srcrefTree(requireCall);
            parent.replaceChild(requireCall, replacement);
            Node varNode = parent.getParent();
            varNode.getParent().addChildBefore(
                IR.exprResult(requireCall).srcrefTree(requireCall),
                varNode);
          }
        });
  }

  /**
   * Traverses a node tree and
   * <ol>
   *   <li>Appends a suffix to all global variable names defined in this module.
   *   <li>Changes references to imported values to be property accesses on the
   *       imported module object.
   * </ol>
   */
  private class RenameGlobalVars extends AbstractPostOrderCallback {
    // The module name, used both as the rename suffix ("x$$module$foo") and
    // to detect collisions with the module object's own name.
    private final String suffix;

    RenameGlobalVars(String suffix) {
      this.suffix = suffix;
    }

    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      JSDocInfo info = n.getJSDocInfo();
      if (info != null) {
        for (Node typeNode : info.getTypeNodes()) {
          fixTypeNode(t, typeNode);
        }
      }

      boolean isShorthandObjLitKey = n.isStringKey() && !n.hasChildren();
      if (n.isName() || isShorthandObjLitKey) {
        String name = n.getString();
        if (suffix.equals(name)) {
          // TODO(moz): Investigate whether we need to return early in this unlikely situation.
          return;
        }

        Var var = t.getScope().getVar(name);
        if (var != null && var.isGlobal()) {
          // Avoid polluting the global namespace.
          String newName = name + "$$" + suffix;
          if (isShorthandObjLitKey) {
            // Change {a} to {a: a$$module$foo}
            n.addChildToBack(IR.name(newName).useSourceInfoIfMissingFrom(n));
          } else {
            n.setString(newName);
            n.setOriginalName(name);
          }
        } else if (var == null && importMap.containsKey(name)) {
          // Change to property access on the imported module object.
          if (parent.isCall() && parent.getFirstChild() == n) {
            parent.putBooleanProp(Node.FREE_CALL, false);
          }

          ModuleOriginalNamePair pair = importMap.get(name);
          Node moduleAccess = NodeUtil.newQName(compiler, pair.module);
          if (pair.originalName.isEmpty()) {
            // "import * as ns": the name refers to the module object itself.
            n.replaceWith(moduleAccess.useSourceInfoIfMissingFromForTree(n));
          } else {
            n.replaceWith(
                IR.getprop(moduleAccess, IR.string(pair.originalName))
                    .useSourceInfoIfMissingFromForTree(n));
          }
        }
      }
    }

    /**
     * Replace type name references. Change short names to fully qualified names
     * with namespace prefixes. Eg: {Foo} becomes {module$test.Foo}.
     */
    private void fixTypeNode(NodeTraversal t, Node typeNode) {
      if (typeNode.isString()) {
        String name = typeNode.getString();
        if (ModuleLoader.isPathIdentifier(name)) {
          // The type is written as a module path, e.g. {./foo.Type}:
          // split it into the module part and the local type part.
          int lastSlash = name.lastIndexOf('/');
          int endIndex = name.indexOf('.', lastSlash);
          String localTypeName = null;
          if (endIndex == -1) {
            endIndex = name.length();
          } else {
            localTypeName = name.substring(endIndex);
          }

          String moduleName = name.substring(0, endIndex);
          ModuleLoader.ModulePath path =
              t.getInput()
                  .getPath()
                  .resolveJsModule(
                      moduleName,
                      typeNode.getSourceFileName(),
                      typeNode.getLineno(),
                      typeNode.getCharno());
          if (path == null) {
            path = t.getInput().getPath().resolveModuleAsPath(moduleName);
          }
          String globalModuleName = path.toModuleName();
          typeNode.setString(
              localTypeName == null ? globalModuleName : globalModuleName + localTypeName);
        } else {
          // Rewrite only the base of a dotted name, e.g. "Foo" in "Foo.Bar".
          List<String> splitted = Splitter.on('.').limit(2).splitToList(name);
          String baseName = splitted.get(0);
          String rest = "";
          if (splitted.size() == 2) {
            rest = "." + splitted.get(1);
          }
          Var var = t.getScope().getVar(baseName);
          if (var != null && var.isGlobal()) {
            typeNode.setString(baseName + "$$" + suffix + rest);
          } else if (var == null && importMap.containsKey(baseName)) {
            ModuleOriginalNamePair pair = importMap.get(baseName);
            if (pair.originalName.isEmpty()) {
              typeNode.setString(pair.module + rest);
            } else {
              typeNode.setString(baseName + "$$" + pair.module + rest);
            }
          }
          typeNode.setOriginalName(name);
        }
      }

      for (Node child = typeNode.getFirstChild(); child != null;
           child = child.getNext()) {
        fixTypeNode(t, child);
      }

      compiler.reportCodeChange();
    }
  }

  // A (moduleName, originalName) pair; see the importMap field doc.
  private static class ModuleOriginalNamePair {
    private String module;
    private String originalName;

    private ModuleOriginalNamePair(String module, String originalName) {
      this.module = module;
      this.originalName = originalName;
    }

    @Override
    public String toString() {
      return "(" + module + ", " + originalName + ")";
    }
  }

  // An exported name plus the node used for source-info when emitting the
  // corresponding module-object assignment.
  private static class NameNodePair {
    final String name;
    final Node nodeForSourceInfo;

    private NameNodePair(String name, Node nodeForSourceInfo) {
      this.name = name;
      this.nodeForSourceInfo = nodeForSourceInfo;
    }

    @Override
    public String toString() {
      return "(" + name + ", " + nodeForSourceInfo + ")";
    }
  }
}
| |
/**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package uk.ac.cam.caret.sakai.rwiki.component.service.impl;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Stack;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.alias.api.AliasService;
import org.sakaiproject.authz.api.SecurityService;
import org.sakaiproject.component.api.ComponentManager;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.db.cover.SqlService;
import org.sakaiproject.email.api.DigestService;
import org.sakaiproject.entity.api.Entity;
import org.sakaiproject.entity.api.EntityAccessOverloadException;
import org.sakaiproject.entity.api.EntityCopyrightException;
import org.sakaiproject.entity.api.EntityManager;
import org.sakaiproject.entity.api.EntityNotDefinedException;
import org.sakaiproject.entity.api.EntityPermissionException;
import org.sakaiproject.entity.api.HttpAccess;
import org.sakaiproject.entity.api.Reference;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.event.api.EventTrackingService;
import org.sakaiproject.event.api.NotificationEdit;
import org.sakaiproject.event.api.NotificationService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.site.api.SiteService;
import org.sakaiproject.thread_local.api.ThreadLocalManager;
import org.sakaiproject.time.api.TimeService;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.user.api.UserDirectoryService;
import org.apache.commons.lang.StringUtils;
import org.springframework.orm.hibernate3.HibernateOptimisticLockingFailureException;
import org.hibernate.HibernateException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import uk.ac.cam.caret.sakai.rwiki.component.Messages;
import uk.ac.cam.caret.sakai.rwiki.component.dao.impl.ListProxy;
import uk.ac.cam.caret.sakai.rwiki.component.model.impl.RWikiEntityImpl;
import uk.ac.cam.caret.sakai.rwiki.model.RWikiPermissionsImpl;
import uk.ac.cam.caret.sakai.rwiki.service.api.EntityHandler;
import uk.ac.cam.caret.sakai.rwiki.service.api.PageLinkRenderer;
import uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService;
import uk.ac.cam.caret.sakai.rwiki.service.api.RWikiSecurityService;
import uk.ac.cam.caret.sakai.rwiki.service.api.RenderService;
import uk.ac.cam.caret.sakai.rwiki.service.api.dao.ObjectProxy;
import uk.ac.cam.caret.sakai.rwiki.service.api.dao.RWikiCurrentObjectDao;
import uk.ac.cam.caret.sakai.rwiki.service.api.dao.RWikiHistoryObjectDao;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiCurrentObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiEntity;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiHistoryObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiObject;
import uk.ac.cam.caret.sakai.rwiki.service.api.model.RWikiPermissions;
import uk.ac.cam.caret.sakai.rwiki.service.exception.CreatePermissionException;
import uk.ac.cam.caret.sakai.rwiki.service.exception.PermissionException;
import uk.ac.cam.caret.sakai.rwiki.service.exception.ReadPermissionException;
import uk.ac.cam.caret.sakai.rwiki.service.exception.UpdatePermissionException;
import uk.ac.cam.caret.sakai.rwiki.service.exception.VersionException;
import uk.ac.cam.caret.sakai.rwiki.service.message.api.PreferenceService;
import uk.ac.cam.caret.sakai.rwiki.utils.NameHelper;
import uk.ac.cam.caret.sakai.rwiki.utils.TimeLogger;
/**
* @author andrew
*/
// FIXME: Component
public class RWikiObjectServiceImpl implements RWikiObjectService
{
// Logger shared by all instances of this service.
private static Logger log = LoggerFactory.getLogger(RWikiObjectServiceImpl.class);
// DAO for the current (head) revision of wiki pages.
private RWikiCurrentObjectDao cdao;
// DAO for historical revisions of wiki pages.
private RWikiHistoryObjectDao hdao;
// dependency
/**
 * Contains a map of handler beans injected
 */
private Map m_handlers = null;
// Name of the page copied as a template when a page is created and no
// explicit template name is supplied.
public String createTemplatePageName = "default_template"; //$NON-NLS-1$
// Collaborating services; most are resolved in init() from the component
// manager (aliasService appears to be injected separately — TODO confirm).
private RWikiSecurityService wikiSecurityService;
private RenderService renderService;
private PreferenceService preferenceService;
private EntityManager entityManager;
private NotificationService notificationService;
private SessionManager sessionManager;
private EventTrackingService eventTrackingService;
private SiteService siteService;
private ThreadLocalManager threadLocalManager;
private TimeService timeService;
private DigestService digestService;
private SecurityService securityService;
/** Configuration: to run the ddl on init or not. */
protected boolean autoDdl = false;
private AliasService aliasService;
private UserDirectoryService userDirectoryService;
// Cap on the length of the stored referenced-pages string (SAK-12115);
// overridden from "wiki.maxReferences" in init().
private int maxReferencesStringSize = 4000;
// Whether page reads should be tracked as events ("wiki.trackreads").
private boolean trackReads = ServerConfigurationService.getBoolean("wiki.trackreads", false);
/**
 * Configuration setter: whether to run the DDL (table creation) on init.
 *
 * @param value
 *        string form of the flag; "true" (case-insensitive) enables it
 */
public void setAutoDdl(String value)
{
	// Boolean.parseBoolean is the idiomatic, boxing-free equivalent of
	// Boolean.valueOf(value).booleanValue()
	autoDdl = Boolean.parseBoolean(value);
}
/**
 * Component lifecycle entry point: wires collaborating services from the
 * cover ComponentManager, registers this service as an EntityProducer,
 * optionally registers the email notification handler and runs the DDL.
 */
public void init()
{
	log.debug("init start"); //$NON-NLS-1$
	// collaborators are pulled from the cover ComponentManager rather than
	// injected, so missing components are logged by load() but not fatal
	ComponentManager cm = org.sakaiproject.component.cover.ComponentManager
			.getInstance();
	entityManager = (EntityManager) load(cm, EntityManager.class.getName());
	notificationService = (NotificationService) load(cm,
			NotificationService.class.getName());
	sessionManager = (SessionManager) load(cm, SessionManager.class
			.getName());
	eventTrackingService = (EventTrackingService) load(cm,
			EventTrackingService.class.getName());
	siteService = (SiteService) load(cm, SiteService.class.getName());
	threadLocalManager = (ThreadLocalManager) load(cm,
			ThreadLocalManager.class.getName());
	timeService = (TimeService) load(cm, TimeService.class.getName());
	digestService = (DigestService) load(cm, DigestService.class.getName());
	securityService = (SecurityService) load(cm, SecurityService.class
			.getName());
	wikiSecurityService = (RWikiSecurityService) load(cm,
			RWikiSecurityService.class.getName());
	renderService = (RenderService) load(cm, RenderService.class.getName());
	preferenceService = (PreferenceService) load(cm,
			PreferenceService.class.getName());
	userDirectoryService = (UserDirectoryService) load(cm,UserDirectoryService.class.getName());
	// make wiki pages addressable under REFERENCE_ROOT
	entityManager.registerEntityProducer(this,
			RWikiObjectService.REFERENCE_ROOT);
	if (ServerConfigurationService.getBoolean("wiki.notification", true)) //$NON-NLS-1$
	{
		// Email notification
		// register a transient notification for resources
		NotificationEdit edit = notificationService
				.addTransientNotification();
		// set functions
		edit.setFunction(RWikiObjectService.EVENT_RESOURCE_ADD);
		edit.addFunction(RWikiObjectService.EVENT_RESOURCE_WRITE);
		edit.addFunction(RWikiObjectService.EVENT_RESOURCE_READ);
		// set the filter to any site related resource
		edit.setResourceFilter(RWikiObjectService.REFERENCE_ROOT);
		// %%% is this the best we can do? -ggolden
		// set the action
		edit.setAction(new SiteEmailNotificationRWiki(this,
				this.renderService, this.preferenceService,
				this.siteService, this.securityService, this.entityManager,
				this.threadLocalManager, this.timeService,
				this.digestService, this.userDirectoryService));
	}
	try
	{
		if (autoDdl)
		{
			// create/upgrade the sakai_rwiki tables when auto DDL is on
			SqlService.getInstance().ddl(this.getClass().getClassLoader(),
					"sakai_rwiki");
		}
	}
	catch (Exception ex)
	{
		log.error("Perform additional SQL setup", ex);
	}
	maxReferencesStringSize = ServerConfigurationService.getInt("wiki.maxReferences",4000);
	log.debug("init end"); //$NON-NLS-1$
}
/**
 * Looks up a component by name from the component manager, logging an
 * error when it cannot be found. Callers still receive {@code null} in
 * that case, so each caller must tolerate a missing component.
 *
 * @param cm the Sakai component manager
 * @param name the fully qualified component name
 * @return the component, or null if not registered
 */
private Object load(ComponentManager cm, String name)
{
	Object o = cm.get(name);
	if (o == null)
	{
		// parameterized SLF4J call: no concatenation cost when disabled
		log.error("Can't find Spring component named {}", name);
	}
	return o;
}
/**
 * Fetches (or lazily creates) the page identified by {@code name} in the
 * given realm, seeding new pages from the configured default template.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#getRWikiObject(java.lang.String,
 *      java.lang.String, java.lang.String)
 */
public RWikiCurrentObject getRWikiObject(String name, String realm)
		throws PermissionException
{
	// delegate to the full form: nothing to ignore, default template
	return getRWikiObject(name, realm, null, createTemplatePageName);
}
/**
 * Fetches the page with the given (possibly local) name in the realm. When
 * the page does not exist, the user must hold create permission; a new
 * object is created and, when a template page exists in the same space,
 * its content/permissions/references are copied in. When the page does
 * exist, read permission is checked before it is returned.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#getRWikiObject(java.lang.String,
 *      java.lang.String, java.lang.String, java.lang.String)
 */
public RWikiCurrentObject getRWikiObject(String name, String realm,
		RWikiObject ignore, String templatePage) throws PermissionException
{
	// wall-clock timers feed TimeLogger for performance diagnostics
	long start = System.currentTimeMillis();
	String user = sessionManager.getCurrentSessionUserId();
	try
	{
		if (log.isDebugEnabled())
		{
			log.debug("Looking for object with name " + name + " in realm " //$NON-NLS-1$ //$NON-NLS-2$
					+ realm + " for user " + user); //$NON-NLS-1$
		}
		// May throw Permission Exception...
		// only globalise if not already
		name = NameHelper.globaliseName(name, realm);
		long start2 = System.currentTimeMillis();
		RWikiCurrentObject returnable;
		try
		{
			returnable = cdao.findByGlobalName(name);
		}
		finally
		{
			long finish = System.currentTimeMillis();
			TimeLogger.printTimer("dao.findByGlobalName: " + name, start2, //$NON-NLS-1$
					finish);
		}
		if (returnable == null)
		{
			// page does not exist: creating requires create permission
			String permissionsReference = wikiSecurityService
					.createPermissionsReference(realm);
			if (!wikiSecurityService
					.checkCreatePermission(permissionsReference))
			{
				throw new CreatePermissionException("User: " + user //$NON-NLS-1$
						+ " cannot create pages in realm: " + realm); //$NON-NLS-1$
			}
			returnable = cdao.createRWikiObject(name, realm);
			// zero in on the correct space.
			String pageSpace = NameHelper.localizeSpace(name, realm);
			String defTemplate = NameHelper.globaliseName(templatePage,
					pageSpace);
			RWikiCurrentObject template = cdao
					.findByGlobalName(defTemplate);
			if (template != null)
			{
				// seed the new page from the template page
				returnable.setContent(template.getContent());
				returnable.setPermissions(template.getPermissions());
				returnable.setGroupWrite(true);
				returnable.setGroupRead(true); //SAK SAK-8234
				returnable.setUser(user);
				returnable.setOwner(user);
				returnable.setRealm(realm);
				returnable.setReferenced(template.getReferenced());
				returnable.setSha1(template.getSha1());
			}
			return returnable;
		}
		else if (wikiSecurityService
				.checkRead((RWikiEntity) getEntity(returnable)))
		{
			// Allowed to read this object
			return returnable;
		}
		else
		{
			throw new ReadPermissionException(user, returnable);
		}
	}
	finally
	{
		long finish = System.currentTimeMillis();
		TimeLogger.printTimer("dao.GetRWikiObject: " + name + ", " + user //$NON-NLS-1$ //$NON-NLS-2$
				+ ", " + realm, start, finish); //$NON-NLS-1$
	}
}
/**
 * Full-text search over pages in a realm; the current user must hold the
 * search permission for that realm.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#findByGlobalNameAndContents(java.lang.String,
 *      java.lang.String, java.lang.String)
 */
public List search(String criteria, String realm)
		throws PermissionException
{
	final String user = sessionManager.getCurrentSessionUserId();
	final String permissionsReference = wikiSecurityService
			.createPermissionsReference(realm);
	if (wikiSecurityService.checkSearchPermission(permissionsReference))
	{
		return cdao.findByGlobalNameAndContents(criteria, user, realm);
	}
	throw new ReadPermissionException(user, realm);
}
/**
 * @return the DAO used for current page revisions
 */
public RWikiCurrentObjectDao getRWikiCurrentObjectDao()
{
	return cdao;
}
/**
 * @param cdao the DAO to use for current page revisions
 */
public void setRWikiCurrentObjectDao(RWikiCurrentObjectDao cdao)
{
	this.cdao = cdao;
}
/**
 * @return the DAO used for historical page revisions
 */
public RWikiHistoryObjectDao getRWikiHistoryObjectDao()
{
	return hdao;
}
/**
 * @param hdao the DAO to use for historical page revisions
 */
public void setRWikiHistoryObjectDao(RWikiHistoryObjectDao hdao)
{
	this.hdao = hdao;
}
/**
 * Updates a page (content and optionally permissions) on behalf of the
 * currently logged-in session user.
 */
public void update(String name, String realm, Date version, String content,
		RWikiPermissions permissions) throws PermissionException,
		VersionException
{
	update(name, sessionManager.getCurrentSessionUserId(), realm, version,
			content, permissions);
}
/**
 * This will update an object setting the modified by and owner using the
 * supplied user and using the <b>current user</b> for permissions. The
 * reason this is private and is in the service at all, is that we need to
 * be able to move rwiki objects about on behalf of another user.
 *
 * @param name page name (local or global)
 * @param user the user to set as owner/modified-by, normally the current user
 * @param realm the realm (site) the page lives in
 * @param version the version the caller last saw, for optimistic locking
 * @param content new content, or null to leave content unchanged
 * @param permissions new permissions, or null to leave them unchanged
 * @throws PermissionException if the current user may not read/update/admin the page
 * @throws VersionException if the page changed since the supplied version
 */
private void update(String name, String user, String realm, Date version,
		String content, RWikiPermissions permissions)
		throws PermissionException, VersionException, RuntimeException
{
	// May throw ReadPermissionException...
	RWikiCurrentObject rwo = getRWikiObject(name, realm);
	RWikiHistoryObject rwho = null;
	if (wikiSecurityService.checkUpdate((RWikiEntity) getEntity(rwo)))
	{
		// writes content, bumps revision, and returns a history snapshot
		// (null when the content is unchanged)
		rwho = updateContent(rwo, content, version);
	}
	else
	{
		throw new UpdatePermissionException("User: " + user //$NON-NLS-1$
				+ " doesn't have permission to update: " + name); //$NON-NLS-1$
	}
	if (permissions != null)
	{
		// changing permissions additionally requires admin on the page
		if (wikiSecurityService.checkAdmin((RWikiEntity) getEntity(rwo)))
		{
			rwo.setPermissions(permissions);
		}
		else
		{
			throw new UpdatePermissionException("User: " + user //$NON-NLS-1$
					+ " doesn't have permission to update and admin: " //$NON-NLS-1$
					+ name);
		}
	}
	rwo.setUser(user);
	if (rwo.getOwner() == null)
	{
		rwo.setOwner(user);
	}
	try
	{
		cdao.update(rwo, rwho);
		Entity e = getEntity(rwo);
		int revision = 1;
		try
		{
			revision = rwo.getRevision().intValue();
		}
		catch ( Exception ex )
		{
			// best effort: an unreadable revision is treated as 1 (new page)
		}
		int notiPriority = NotificationService.PREF_IMMEDIATE;
		// a "small change" marker in the thread suppresses notification
		if (RWikiObjectService.SMALL_CHANGE_IN_THREAD.equals(threadLocalManager
				.get(RWikiObjectService.SMALL_CHANGE_IN_THREAD)))
		{
			notiPriority = NotificationService.PREF_NONE;
		}
		if ( revision == 1 )
		{
			// first revision posts an add event, later ones a write event
			eventTrackingService.post(eventTrackingService.newEvent(
					EVENT_RESOURCE_ADD, e.getReference(), true,
					notiPriority));
		}
		else
		{
			eventTrackingService.post(eventTrackingService.newEvent(
					EVENT_RESOURCE_WRITE, e.getReference(), true,
					notiPriority));
		}
	}
	catch (HibernateOptimisticLockingFailureException e)
	{
		throw new VersionException("Version has changed since: " + version, //$NON-NLS-1$
				e);
	}
	catch (HibernateException e)
	{
		// NOTE(review): cause is not chained onto the RuntimeException;
		// the message suggests the referenced-links column overflow case
		log.info("Caught hibernate exception, update failed."+e.getMessage());
		throw new RuntimeException("An update could not be made to this wiki page. A possible cause is that you have too many links.");
	}
}
/**
 * Updates only the content of a page, leaving permissions unchanged.
 */
public void update(String name, String realm, Date version, String content)
		throws PermissionException, VersionException
{
	// May throw ReadPermissionException... no permissions change requested
	update(name, realm, version, content, null);
}
/**
 * Updates only the permissions of an existing page. Requires admin on the
 * page; a history record is written and the revision is incremented.
 *
 * @param permissions must not be null (use the content update methods otherwise)
 * @throws VersionException when the page changed since the supplied version
 */
public void update(String name, String realm, Date version,
		RWikiPermissions permissions) throws PermissionException,
		VersionException
{
	if (permissions == null)
	{
		throw new IllegalArgumentException("permissions must not be null"); //$NON-NLS-1$
	}
	String user = sessionManager.getCurrentSessionUserId();
	RWikiCurrentObject rwo = getRWikiObject(name, realm);
	if (wikiSecurityService.checkAdmin((RWikiEntity) getEntity(rwo)))
	{
		// snapshot current state into history before mutating
		RWikiHistoryObject rwho = hdao.createRWikiHistoryObject(rwo);
		rwo.setRevision(Integer.valueOf(rwo.getRevision().intValue() + 1));
		rwo.setPermissions(permissions);
		rwo.setVersion(version);
		try
		{
			cdao.update(rwo, rwho);
			// track it
			Entity e = getEntity(rwo);
			int revision = 1;
			try
			{
				revision = rwo.getRevision().intValue();
			}
			catch ( Exception ex )
			{
				// best effort: an unreadable revision is treated as 1
			}
			int notiPriority = NotificationService.PREF_IMMEDIATE;
			// NOTE(review): this null-check differs from the equals() test
			// used in the private update() — confirm both are intended
			if (threadLocalManager.get(RWikiObjectService.SMALL_CHANGE_IN_THREAD) != null)
			{
				notiPriority = NotificationService.PREF_NONE;
			}
			if ( revision == 1 )
			{
				eventTrackingService.post(eventTrackingService.newEvent(
						EVENT_RESOURCE_ADD, e.getReference(), true,
						notiPriority));
			}
			else
			{
				eventTrackingService.post(eventTrackingService.newEvent(
						EVENT_RESOURCE_WRITE, e.getReference(), true,
						notiPriority));
			}
		}
		catch (HibernateOptimisticLockingFailureException e)
		{
			throw new VersionException("Version has changed since: " //$NON-NLS-1$
					+ version, e);
		}
	}
	else
	{
		throw new UpdatePermissionException("User: " + user //$NON-NLS-1$
				+ " doesn't have permission to update and admin: " + name); //$NON-NLS-1$
	}
}
/**
 * Applies new content to the current object, creating a history snapshot,
 * bumping the revision and recomputing the set of referenced pages.
 *
 * @param rwo the live page object to mutate
 * @param content the new content, or null to skip the content update
 * @param version the version the caller saw (optimistic locking), may be null
 * @return the history object snapshotting the previous revision, or null
 *         when the content was null or unchanged (no new revision)
 */
private RWikiHistoryObject updateContent(RWikiCurrentObject rwo,
		String content, Date version)
{
	// We set the version in order to allow hibernate to tell us if the
	// object has been changed since we last looked at it.
	if (version != null)
	{
		rwo.setVersion(version);
	}
	if (content != null && !content.equals(rwo.getContent()))
	{
		// create a history instance
		RWikiHistoryObject rwho = hdao.createRWikiHistoryObject(rwo);
		// set the content (line endings normalised to \n) and increment
		// the revision
		rwo.setContent(content.replaceAll("\r\n?", "\n")); //$NON-NLS-1$ //$NON-NLS-2$
		rwo.setRevision(Integer.valueOf(rwo.getRevision().intValue() + 1));
		// render to get a list of links
		final HashSet referenced = new HashSet();
		// Links should be globalised against the page space!
		final String currentSpace = NameHelper.localizeSpace(rwo.getName(),
				rwo.getRealm());
		// link collector: every rendered link/create-link is recorded as a
		// globalised page name; nothing is written to the output buffer
		PageLinkRenderer plr = new PageLinkRenderer()
		{
			public void appendLink(StringBuffer buffer, String name, String view, String anchor, boolean autoGenerated)
			{
				// auto-generated links are not counted as references
				if (!autoGenerated)
				{
					this.appendLink(buffer, name, view, anchor);
				}
			}
			public void appendLink(StringBuffer buffer, String name,
					String view)
			{
				this.appendLink(buffer, name, view, null);
			}
			public void appendLink(StringBuffer buffer, String name,
					String view, String anchor)
			{
				referenced
						.add(NameHelper.globaliseName(name, currentSpace));
			}
			public void appendCreateLink(StringBuffer buffer, String name,
					String view)
			{
				referenced
						.add(NameHelper.globaliseName(name, currentSpace));
			}
			public boolean isCachable()
			{
				return false; // should not cache this render op
			}
			public boolean canUseCache()
			{
				return false;
			}
			public void setCachable(boolean cachable)
			{
				// do nothing
			}
			public void setUseCache(boolean b)
			{
				// do nothing
			}
		};
		renderService.renderPage(rwo, currentSpace, plr);
		// process the references
		StringBuffer sb = extractReferences(rwo, referenced);
		rwo.setReferenced(sb.toString());
		return rwho;
	}
	return null;
}
/**
 * Serialises the referenced-page names for an rwiki object into a
 * "::"-separated string, truncating so the stored value stays below
 * maxReferencesStringSize (SAK-12115) instead of growing without bound on
 * degenerate pages.
 *
 * @param rwo the rwiki object the references belong to (not modified here)
 * @param referenced the set of globalised page names to serialise
 * @return a buffer of the form {@code ::name::name...::}
 */
public StringBuffer extractReferences(RWikiCurrentObject rwo, final HashSet referenced) {
	StringBuffer sb = new StringBuffer();
	for (Iterator i = referenced.iterator(); i.hasNext();)
	{
		String next = (String) i.next();
		// 4 = this entry's "::" plus the closing "::" appended below
		if (sb.length() + 4 + next.length() >= maxReferencesStringSize) { // SAK-12115
			break;
		}
		sb.append("::").append(next); //$NON-NLS-1$
	}
	sb.append("::"); //$NON-NLS-1$
	return sb;
}
/**
 * Checks whether a page exists, globalising the name against the space
 * first; the lookup is timed via TimeLogger.
 */
public boolean exists(String name, String space)
{
	final long start = System.currentTimeMillis();
	try
	{
		return cdao.exists(NameHelper.globaliseName(name, space));
	}
	finally
	{
		TimeLogger.printTimer("Exists: " + name, start, //$NON-NLS-1$
				System.currentTimeMillis());
	}
}
/**
 * Lists pages in the realm changed since the supplied date.
 */
public List findChangedSince(Date since, String realm)
{
	// TODO Permissions ? — no search-permission check is applied here
	return cdao.findChangedSince(since, realm);
}
/**
 * Lists the pages whose stored reference string mentions the given page.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#findReferencingPages(java.lang.String)
 */
public List findReferencingPages(String name)
{
	// TODO Permissions ?
	return cdao.findReferencingPages(name);
}
/**
 * Reverts a page to the content of a historical revision by performing a
 * normal content update with that revision's content.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#revert(java.lang.String,
 *      java.lang.String, java.lang.String, java.util.Date, int)
 */
public void revert(String name, String realm, Date version, int revision)
{
	// TODO Permissions ?
	RWikiCurrentObject current = getRWikiObject(name, realm);
	RWikiHistoryObject historical = hdao.getRWikiHistoryObject(current,
			revision);
	update(name, realm, version, historical.getContent());
}
/**
 * Resolves the current object corresponding to the supplied reference.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#getRWikiObject(java.lang.String)
 */
public RWikiCurrentObject getRWikiObject(RWikiObject reference)
{
	// TODO Permissions ?
	return cdao.getRWikiCurrentObject(reference);
}
/**
 * Fetches one historical revision of the referenced page.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#getRWikiHistoryObject(java.lang.String,
 *      int)
 */
public RWikiHistoryObject getRWikiHistoryObject(RWikiObject reference,
		int revision)
{
	// TODO Permissions ?
	return hdao.getRWikiHistoryObject(reference, revision);
}
/**
 * Lists all historical revisions of the referenced page.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#findRWikiHistoryObjects(java.lang.String)
 */
public List findRWikiHistoryObjects(RWikiObject reference)
{
	// TODO Permissions ?
	return hdao.findRWikiHistoryObjects(reference);
}
/**
 * Lists all historical revisions of the referenced page, newest first.
 */
public List findRWikiHistoryObjectsInReverse(RWikiObject reference)
{
	// TODO Permissions ?
	return hdao.findRWikiHistoryObjectsInReverse(reference);
}
/**
 * @return the name of the template page used when creating new pages
 */
public String getCreateTemplatePageName()
{
	return createTemplatePageName;
}
/**
 * @param createTemplatePageName
 *        the name of the template page to use when creating new pages
 */
public void setCreateTemplatePageName(String createTemplatePageName)
{
	this.createTemplatePageName = createTemplatePageName;
}
/**
 * Lists the pages beneath the given global parent page name.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#findRWikiSubPages(java.lang.String)
 */
public List findRWikiSubPages(String globalParentPageName)
{
	// TODO Permissions ?
	return cdao.findRWikiSubPages(globalParentPageName);
}
/**
 * Appends a new comment sub-page ("{name}.NNN") beneath the given page,
 * numbering it one past the last existing comment. Concurrent commenters
 * can race for the same number, so a {@link VersionException} is retried
 * up to five times before being propagated.
 *
 * <p>Bug fix: the original retry guard ({@code if (retries >= 5) throw e}
 * inside {@code while (retries < 5)}) could never fire, so after five
 * failures the exception was silently swallowed; it is now rethrown.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#updateNewComment(java.lang.String,
 *      java.lang.String, java.lang.String, java.util.Date,
 *      java.lang.String)
 */
public void updateNewComment(String name, String realm, Date version,
		String content) throws PermissionException, VersionException
{
	int retries = 0;
	while (true)
	{
		try
		{
			// find the highest-numbered existing comment sub-page
			RWikiObject lastComment = cdao.findLastRWikiSubPage(name);
			int cnum = 0;
			if (lastComment != null)
			{
				String lastCommentName = lastComment.getName();
				int lastp = lastCommentName.lastIndexOf("."); //$NON-NLS-1$
				if (lastp >= 0)
				{
					try
					{
						cnum = Integer.parseInt(lastCommentName
								.substring(lastp + 1)) + 1;
					}
					catch (NumberFormatException ignored)
					{
						// non-numeric suffix: start numbering at 0
					}
				}
			}
			// zero-padded three digit comment number, e.g. "home.007"
			String newCommentName = MessageFormat.format(
					"{0}.{1,number,000}", new Object[] { name, //$NON-NLS-1$
							Integer.valueOf(cnum) });
			update(newCommentName, realm, version, content);
			return;
		}
		catch (VersionException e)
		{
			// another commenter grabbed this number; retry with the next
			retries++;
			if (retries >= 5)
			{
				throw e;
			}
		}
	}
}
/**
 * Wraps a list so its elements are materialised through the supplied
 * object proxy on access.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#createListProxy(java.util.List,
 *      uk.ac.cam.caret.sakai.rwiki.service.api.dao.ObjectProxy)
 */
public List createListProxy(List commentsList, ObjectProxy lop)
{
	return new ListProxy(commentsList, lop);
}
/**
 * Creates a fresh, unsaved rwiki object with placeholder name/realm;
 * callers are expected to populate it before it is persisted.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#createNewRWikiCurrentObject()
 */
public RWikiObject createNewRWikiCurrentObject()
{
	return cdao.createRWikiObject("dummy", "dummy"); //$NON-NLS-1$ //$NON-NLS-2$
}
/**
 * Creates an empty permissions implementation for callers to populate.
 *
 * @see uk.ac.cam.caret.sakai.rwiki.service.api.RWikiObjectService#createNewRWikiPermissionsImpl()
 */
public RWikiPermissions createNewRWikiPermissionsImpl()
{
	return new RWikiPermissionsImpl();
}
/**
 * {@inheritDoc} The entity-producer label for wiki content.
 */
public String getLabel()
{
	return REFERENCE_LABEL;
}
/**
 * {@inheritDoc} Wiki content participates in site archive/merge.
 */
public boolean willArchiveMerge()
{
	return true;
}
/**
 * {@inheritDoc} Enables the wiki for a newly created context when the tool
 * is placed in it.
 */
public void contextCreated(String context, boolean toolPlacement)
{
	if (toolPlacement)
	{
		enableWiki(context);
	}
}
/**
 * {@inheritDoc} Enables the wiki for an updated context when the tool is
 * placed in it.
 */
public void contextUpdated(String context, boolean toolPlacement)
{
	if (toolPlacement)
	{
		enableWiki(context);
	}
}
/**
 * {@inheritDoc} Disables the wiki when its context is deleted, regardless
 * of tool placement.
 */
public void contextDeleted(String context, boolean toolPlacement)
{
	disableWiki(context);
}
/**
 * {@inheritDoc} Archive all the wiki pages in the site as a single
 * collection: each page's current revision is written to the DOM followed
 * by its full revision history. A human-readable results log is returned.
 *
 * @param siteId the site whose pages are archived
 * @param doc the archive DOM document being built
 * @param stack element stack; the wiki element is pushed and popped here
 * @param archivePath unused by this implementation
 * @param attachments unused by this implementation
 * @return the textual results log of the archive operation
 */
public String archive(String siteId, Document doc, Stack stack,
		String archivePath, List attachments)
{
	// TODO Permissions ?
	// prepare the buffer for the results log
	StringBuffer results = new StringBuffer();
	results.append(Messages.getString("RWikiObjectServiceImpl.32")).append(siteId).append("\n"); //$NON-NLS-1$ //$NON-NLS-2$
	log.debug("archiving Wiki Pages for " + siteId); //$NON-NLS-1$
	int npages = 0;
	int nversions = 0;
	try
	{
		String defaultRealm = siteService.getSite(siteId).getReference();
		wikiSecurityService
				.checkAdminPermission(RWikiObjectService.REFERENCE_ROOT
						+ defaultRealm);
		// start with an element with our very own name
		Element element = doc.createElement(APPLICATION_ID);
		((Element) stack.peek()).appendChild(element);
		stack.push(element);
		try
		{
			List l = cdao.findRWikiSubPages("/site/" + siteId); //$NON-NLS-1$
			for (Iterator i = l.iterator(); i.hasNext();)
			{
				RWikiObject rwo = (RWikiObject) i.next();
				RWikiEntity rwe = (RWikiEntity) getEntity(rwo);
				log.debug("Archiving " + rwo.getName()); //$NON-NLS-1$
				rwe.toXml(doc, stack);
				npages++;
				// archive the full revision history alongside the page
				List lh = this.findRWikiHistoryObjects(rwo);
				if (lh != null)
				{
					for (Iterator ih = lh.iterator(); ih.hasNext();)
					{
						RWikiObject rwoh = (RWikiObject) ih.next();
						RWikiEntity rwoeh = (RWikiEntity) getEntity(rwoh);
						log.debug("Archiving " + rwoh.getName() //$NON-NLS-1$
								+ " version " + rwoh.getVersion()); //$NON-NLS-1$
						rwoeh.toXml(doc, stack);
						nversions++;
					}
				}
			}
		}
		catch (Exception any)
		{
			// fix: route through the logger (was any.printStackTrace())
			log.error("Failed archiving wiki pages for site " + siteId, any); //$NON-NLS-1$
			results.append(Messages.getString("RWikiObjectServiceImpl.31") + siteId //$NON-NLS-1$
					+ " " + any.toString() + "\n"); //$NON-NLS-1$ //$NON-NLS-2$
		}
		results.append(Messages.getString("RWikiObjectServiceImpl.30")).append(npages).append( //$NON-NLS-1$
				Messages.getString("RWikiObjectServiceImpl.43")).append(nversions).append(Messages.getString("RWikiObjectServiceImpl.44")); //$NON-NLS-1$ //$NON-NLS-2$
		stack.pop();
	}
	catch (IdUnusedException ex)
	{
		// fix: was an empty catch — record that the site was not found
		log.warn("archive: site not found: " + siteId, ex); //$NON-NLS-1$
	}
	return results.toString();
}
/**
 * {@inheritDoc} The archive contains the current version, followed by
 * historical versions. If any of these are out of order, only versions up
 * to the first encountered version will be merged. If the page exists,
 * then only versions that don't exist, and are not already present, will
 * be added; in practice this means all the pages in the set will be
 * rejected for an existing page.
 *
 * @return a textual results log summarising pages/versions merged,
 *         rejected, failed and with content errors
 */
public String merge(String siteId, Element root, String archivePath,
		String fromSiteId, Map attachmentNames, Map userIdTrans,
		Set userListAllowImport)
{
	log.info(" wiki Merge"); //$NON-NLS-1$
	// TODO Permissions ?
	// stolen :) from BaseContentService
	// get the system name: FROM_WT, FROM_CT, FROM_SAKAI
	// String source = null;
	// root: <service> node
	// Node parent = root.getParentNode(); // parent: <archive> node
	// containing
	// "system"
	// if (parent.getNodeType() == Node.ELEMENT_NODE) {
	// Element parentEl = (Element) parent;
	// source = parentEl.getAttribute("system");
	// }
	// prepare the buffer for the results log
	StringBuffer results = new StringBuffer();
	// counters reported in the summary at the end of the merge
	int nversions_reject = 0;
	int npages = 0;
	int nversions = 0;
	int npages_fail = 0;
	int npages_errors = 0;
	try
	{
		String defaultRealm = siteService.getSite(siteId).getReference();
		wikiSecurityService
				.checkAdminPermission(RWikiObjectService.REFERENCE_ROOT
						+ defaultRealm);
		NodeList children = root.getChildNodes();
		final int length = children.getLength();
		log.info("Archive has " + length + " pages "); //$NON-NLS-1$ //$NON-NLS-2$
		for (int i = 0; i < length; i++)
		{
			Node child = children.item(i);
			if (child.getNodeType() != Node.ELEMENT_NODE) continue;
			Element element = (Element) child;
			try
			{
				// rehydrate the archived page into a throwaway object
				RWikiCurrentObject archiverwo = cdao.createRWikiObject(
						"dummy", "dummy"); //$NON-NLS-1$ //$NON-NLS-2$
				RWikiEntity rwe = (RWikiEntity) getEntity(archiverwo);
				rwe.fromXml(element, defaultRealm);
				log.info(" Merging " + archiverwo.getRevision() + ":" //$NON-NLS-1$ //$NON-NLS-2$
						+ rwe.getReference());
				// clear the ID to remove hibernate session issues and
				// recreate
				// a new id issues
				archiverwo.setId(null);
				String pageName = archiverwo.getName();
				if (exists(pageName, defaultRealm))
				{
					// page exists, add to history, if the version does not
					// exist
					RWikiObject rwo = getRWikiObject(pageName, defaultRealm);
					if (archiverwo.getRevision().intValue() >= rwo
							.getRevision().intValue())
					{
						// archived revision is not older than the live one:
						// rejected rather than overwriting current content
						nversions_reject++;
						results
								.append(Messages.getString("RWikiObjectServiceImpl.29")) //$NON-NLS-1$
								.append(rwo.getName())
								.append(Messages.getString("RWikiObjectServiceImpl.28")) //$NON-NLS-1$
								.append(rwo.getRevision())
								.append(
										Messages.getString("RWikiObjectServiceImpl.54")) //$NON-NLS-1$
								.append(archiverwo.getRevision())
								.append(
										Messages.getString("RWikiObjectServiceImpl.55")) //$NON-NLS-1$
								.append(
										Messages.getString("RWikiObjectServiceImpl.56")) //$NON-NLS-1$
								.append(
										Messages.getString("RWikiObjectServiceImpl.57")); //$NON-NLS-1$
					}
					else
					{
						// older revision: slot it into the history if that
						// revision number is not already present
						RWikiHistoryObject rwho = getRWikiHistoryObject(
								rwo, archiverwo.getRevision().intValue());
						if (rwho == null)
						{
							rwho = hdao
									.createRWikiHistoryObject(archiverwo);
							// connect to the correct master object
							rwho.setRwikiobjectid(rwo.getId());
							// save
							hdao.update(rwho);
							rwho = getRWikiHistoryObject(rwo, archiverwo
									.getRevision().intValue());
							nversions++;
						}
						else
						{
							nversions_reject++;
							results
									.append(Messages.getString("RWikiObjectServiceImpl.58")) //$NON-NLS-1$
									.append(rwo.getName())
									.append(
											Messages.getString("RWikiObjectServiceImpl.59")) //$NON-NLS-1$
									.append(rwo.getRevision())
									.append(
											Messages.getString("RWikiObjectServiceImpl.60") //$NON-NLS-1$
											+ Messages.getString("RWikiObjectServiceImpl.61") //$NON-NLS-1$
											+ Messages.getString("RWikiObjectServiceImpl.62") //$NON-NLS-1$
											+ Messages.getString("RWikiObjectServiceImpl.63")); //$NON-NLS-1$
						}
					}
				}
				else
				{
					// page does not exist, create
					String newUser = (String) userIdTrans.get(archiverwo
							.getOwner());
					if (newUser == null) newUser = archiverwo.getOwner();
					// go direct, if we use the utility methods, all sorts
					// of
					// things get reset, which is bad
					cdao.update(archiverwo, null);
					// verify the saved content matches the archive via sha1
					RWikiObject savedrwo = getRWikiObject(archiverwo
							.getName(), archiverwo.getRealm());
					if (archiverwo.getSha1().equals(savedrwo.getSha1()))
					{
						npages++;
					}
					else
					{
						npages_errors++;
						results.append(Messages.getString("RWikiObjectServiceImpl.64")).append( //$NON-NLS-1$
								savedrwo.getName()).append(Messages.getString("RWikiObjectServiceImpl.65")) //$NON-NLS-1$
								.append(savedrwo.getRevision()).append(
										Messages.getString("RWikiObjectServiceImpl.66")).append( //$NON-NLS-1$
										savedrwo.getVersion().getTime())
								.append(Messages.getString("RWikiObjectServiceImpl.67")).append( //$NON-NLS-1$
										savedrwo.getVersion()).append("\n"); //$NON-NLS-1$
						results
								.append(
										Messages.getString("RWikiObjectServiceImpl.69")) //$NON-NLS-1$
								.append(archiverwo.getSha1()).append(
										Messages.getString("RWikiObjectServiceImpl.70")).append( //$NON-NLS-1$
										savedrwo.getSha1()).append(
										Messages.getString("RWikiObjectServiceImpl.71")).append( //$NON-NLS-1$
										archiverwo.getContent()).append(
										Messages.getString("RWikiObjectServiceImpl.72")).append( //$NON-NLS-1$
										savedrwo.getContent()).append("\n"); //$NON-NLS-1$
					}
				}
			}
			catch (Exception ex)
			{
				// a bad element must not abort the whole merge
				npages_fail++;
				log.error("Failed to add page ", ex); //$NON-NLS-1$
				results.append(Messages.getString("RWikiObjectServiceImpl.75")).append( //$NON-NLS-1$
						element.getAttribute(Messages.getString("RWikiObjectServiceImpl.76"))).append( //$NON-NLS-1$
						Messages.getString("RWikiObjectServiceImpl.77")).append( //$NON-NLS-1$
						element.getAttribute(Messages.getString("RWikiObjectServiceImpl.78"))).append( //$NON-NLS-1$
						Messages.getString("RWikiObjectServiceImpl.79")).append(ex.getMessage()).append("\n"); //$NON-NLS-1$ //$NON-NLS-2$
			}
		}
	}
	catch (IdUnusedException ex)
	{
		results.append(Messages.getString("RWikiObjectServiceImpl.81")).append( //$NON-NLS-1$
				siteId).append(" :").append(ex.getMessage()).append("\n"); //$NON-NLS-1$ //$NON-NLS-2$
	}
	results.append(Messages.getString("RWikiObjectServiceImpl.84")).append(Messages.getString("RWikiObjectServiceImpl.85")) //$NON-NLS-1$ //$NON-NLS-2$
			.append(npages).append(Messages.getString("RWikiObjectServiceImpl.86")).append(nversions) //$NON-NLS-1$
			.append(Messages.getString("RWikiObjectServiceImpl.87")).append(nversions_reject).append( //$NON-NLS-1$
					Messages.getString("RWikiObjectServiceImpl.88")).append(npages_fail).append(Messages.getString("RWikiObjectServiceImpl.89")) //$NON-NLS-1$ //$NON-NLS-2$
			.append(npages_errors).append(Messages.getString("RWikiObjectServiceImpl.90")); //$NON-NLS-1$
	return results.toString();
}
/**
 * {@inheritDoc} The single tool id registered by this service.
 */
public String[] myToolIds()
{
	return new String[] { "sakai.rwiki" }; //$NON-NLS-1$
}
/**
 * {@inheritDoc} Only the current version of a page is imported, history is
 * left behind. Contexts are normalised to "/site/&lt;id&gt;" form, then
 * each page beneath the source context is copied (optionally filtered by
 * the supplied id list) via a normal update/create in the target context.
 */
public void transferCopyEntities(String fromContext, String toContext,
		List ids)
{
	log.debug("==================Doing WIki transfer"); //$NON-NLS-1$
	if (fromContext.equals(toContext))
	{
		log
				.debug("===================Source and Target Context are identical, transfer ignored"); //$NON-NLS-1$
		return;
	}
	// FIXME this needs to be moved out to a method!
	// normalise both contexts to "/site/<id>" without a trailing slash
	if (!fromContext.startsWith("/")) //$NON-NLS-1$
	{
		fromContext = "/site/" + fromContext; //$NON-NLS-1$
	}
	if (!toContext.startsWith("/")) //$NON-NLS-1$
	{
		toContext = "/site/" + toContext; //$NON-NLS-1$
	}
	if (fromContext.endsWith("/") && fromContext.length() > 1) { //$NON-NLS-1$
		fromContext = fromContext.substring(0, fromContext.length() - 1);
	}
	if (toContext.endsWith("/") && toContext.length() > 1) { //$NON-NLS-1$
		toContext = toContext.substring(0, toContext.length() - 1);
	}
	log.debug("=================Locating Pages in from Content of " //$NON-NLS-1$
			+ fromContext);
	List pages = findRWikiSubPages(fromContext.length() > 1 ? fromContext + "/" : fromContext); //$NON-NLS-1$
	log.debug("=================Found " + pages.size() + " Pages"); //$NON-NLS-1$ //$NON-NLS-2$
	for (Iterator i = pages.iterator(); i.hasNext();)
	{
		RWikiObject rwo = (RWikiObject) i.next();
		RWikiEntity rwe = (RWikiEntity) getEntity(rwo);
		wikiSecurityService.checkAdmin(rwe);
		// might want to check admin on source and target site ?
		boolean transfer = true;
		// if the list exists, is this id in the list ?
		if (ids != null && ids.size() > 0)
		{
			transfer = false;
			for (Iterator j = ids.iterator(); j.hasNext() && !transfer;)
			{
				String id = (String) j.next();
				if (id.equals(rwo.getRwikiobjectid()))
				{
					transfer = true;
				}
			}
		}
		// ok to transfer
		if (transfer)
		{
			String pageName = rwo.getName();
			log.debug("================Transfering page " + pageName //$NON-NLS-1$
					+ " from " + rwo.getRealm() + " to " + toContext); //$NON-NLS-1$ //$NON-NLS-2$
			// relocate the page name
			pageName = NameHelper.localizeName(pageName, NameHelper
					.localizeSpace(pageName, rwo.getRealm()));
			pageName = NameHelper.globaliseName(pageName, toContext);
			try
			{
				// create a brand new page containing the content,
				// this does not copy prior versions
				RWikiCurrentObject transferPage = null;
				if (exists(pageName, toContext))
				{
					transferPage = getRWikiObject(pageName, toContext);
					update(pageName, toContext, transferPage.getVersion(),
							rwo.getContent(), rwo.getPermissions());
				}
				else
				{
					String user = sessionManager.getCurrentSessionUserId();
					String permissionsReference = wikiSecurityService
							.createPermissionsReference(toContext);
					if (!wikiSecurityService
							.checkCreatePermission(permissionsReference))
					{
						throw new CreatePermissionException("User: " + user //$NON-NLS-1$
								+ " cannot create pages in realm: " //$NON-NLS-1$
								+ pageName);
					}
					update(pageName, toContext, new Date(), rwo
							.getContent(), rwo.getPermissions());
				}
			}
			catch (Throwable t)
			{
				// NOTE(review): failures are logged and swallowed so one bad
				// page does not abort the whole transfer; the throwable
				// itself is not logged — consider passing t to log.error
				log.error("================Failed to import " + pageName //$NON-NLS-1$
						+ " from " + fromContext + " to " + toContext); //$NON-NLS-1$ //$NON-NLS-2$
			}
		}
		else
		{
			log.debug("=============Ignoring transfer of " + rwo.getName()); //$NON-NLS-1$
		}
	}
}
/**
 * {@inheritDoc} The parsing process iterates though a list of regular
 * expressions to generate a match.
 * Note: returns true for any reference under REFERENCE_ROOT, even when no
 * entity handler matched (ref is then left unmodified) — TODO confirm
 * this is intended.
 */
public boolean parseEntityReference(String reference, Reference ref)
{
	if (!reference.startsWith(REFERENCE_ROOT)) return false;
	// example reference: /wiki/site/c7bc194b-b215-4281-a1ac-8ed2ca2014e6/home.
	String[] parts = StringUtils.split(reference, Entity.SEPARATOR);
	String context = null;
	// the first part will be null, then next the service, and the fourth will be the (worksite) context
	if ( parts.length > 2 )
	{
		context = parts[2];
		// strip the trailing "." page-name separator from the context
		if ( context.endsWith(".") )
			context = context.substring(0, context.length()-1 );
	}
	// Translate context alias into site id if necessary
	if ((context != null) && (context.length() > 0))
	{
		if (!siteService.siteExists(context))
		{
			try
			{
				String newContext = aliasService.getTarget(context);
				if (newContext.startsWith(SiteService.REFERENCE_ROOT)) // only support site aliases
				{
					reference = reference.replaceFirst(SiteService.REFERENCE_ROOT + Entity.SEPARATOR + context, newContext);
					ref.updateReference(reference);
				}
			}
			catch (Exception e)
			{
				// alias lookup is best effort; continue with the original
				log.debug(".parseEntityReference(): " + e.toString());
			}
		}
	}
	EntityHandler eh = findEntityReferenceMatch(reference);
	if (eh != null) {
		eh.setReference(APPLICATION_ID, ref, reference);
	}
	return true;
}
/**
 * {@inheritDoc} Resolves the entity for the reference and delegates to the
 * matching handler for the description.
 */
public String getEntityDescription(Reference ref)
{
	checkReference(ref);
	final EntityHandler handler = findEntityHandler(ref);
	return handler.getDescription(getEntity(ref, handler));
}
/**
 * {@inheritDoc} Resolves the entity for the reference and delegates the
 * property lookup to its handler.
 */
public ResourceProperties getEntityResourceProperties(Reference ref)
{
    checkReference(ref);
    final EntityHandler handler = findEntityHandler(ref);
    return handler.getProperties(getEntity(ref, handler));
}
/**
 * {@inheritDoc} Validates the reference and resolves it through the
 * handler registered for its subtype.
 */
public Entity getEntity(Reference ref)
{
    checkReference(ref);
    return getEntity(ref, findEntityHandler(ref));
}
/**
 * {@inheritDoc} The format of the URL is controlled by a MessageFormat
 * String injected into urlFormat. The parameters are 0 = global Page Name.
 */
public String getEntityUrl(Reference ref)
{
    checkReference(ref);
    final EntityHandler handler = findEntityHandler(ref);
    return handler.getUrl(getEntity(ref, handler));
}
/**
 * {@inheritDoc} Delegates the authz-group computation to the handler
 * registered for the reference's subtype.
 */
public Collection getEntityAuthzGroups(Reference ref, String userId)
{
    checkReference(ref);
    return findEntityHandler(ref).getAuthzGroups(ref, userId);
}
/**
 * {@inheritDoc} Serves wiki pages and wiki containers (spaces) over the
 * access servlet. The handler resolves the target entity, performs the
 * read-permission check appropriate to the entity kind, optionally posts
 * a read-tracking event, and streams the rendered content together with
 * the space's "view_right" sidebar page when one exists and is readable.
 * The sidebar-lookup / render sequence was previously duplicated in both
 * branches; it is factored into the private helpers below.
 */
public HttpAccess getHttpAccess()
{
    return new HttpAccess()
    {
        public void handleAccess(HttpServletRequest req,
                HttpServletResponse res, Reference ref,
                Collection copyrightAcceptedRefs)
                throws EntityPermissionException,
                EntityNotDefinedException,
                EntityAccessOverloadException,
                EntityCopyrightException
        {
            // Check for session in request parameter; if present and valid,
            // make it the current session for this request.
            String sessionId = req.getParameter("session");
            if (sessionId != null )
            {
                Session session = sessionManager.getSession( sessionId );
                if ( session != null )
                    sessionManager.setCurrentSession( session );
            }
            try
            {
                checkReference(ref);
            }
            catch (Throwable t)
            {
                // A reference of the wrong type means the page is not ours.
                throw new EntityNotDefinedException(ref.getId());
            }
            try
            {
                EntityHandler eh = findEntityHandler(ref);
                Entity entity = getEntity(ref, eh);
                String user = req.getRemoteUser();
                if (!(entity instanceof RWikiEntity))
                {
                    throw new EntityNotDefinedException(ref.getReference());
                }
                RWikiEntity rwe = (RWikiEntity) entity;
                if (!rwe.isContainer())
                {
                    // A single page: read permission is checked on the page.
                    RWikiObject rwo = rwe.getRWikiObject();
                    if (!wikiSecurityService.checkRead(rwe))
                    {
                        throw new org.sakaiproject.exception.PermissionException(
                                user, RWikiSecurityService.SECURE_READ,
                                ref.getReference());
                    }
                    String space = NameHelper.localizeSpace(rwo.getName(), rwo.getRealm());
                    render(eh, entity, space, ref, req, res);
                }
                else
                {
                    // this is a container, read on the site
                    if (!wikiSecurityService.checkGetPermission(ref
                            .getReference()))
                    {
                        throw new org.sakaiproject.exception.PermissionException(
                                user, RWikiSecurityService.SECURE_READ,
                                ref.getReference());
                    }
                    render(eh, entity, rwe.getReference(), ref, req, res);
                }
            }
            catch (org.sakaiproject.exception.PermissionException p ) {
                throw new EntityPermissionException(p.getUser(),p.getLock(),p.getResource());
            }
            catch ( EntityNotDefinedException e) {
                throw e;
            }
            catch (Throwable t)
            {
                log.warn("Error getting wiki page via access :" //$NON-NLS-1$
                        + ref.getReference());
                log.debug("Stack trace was ", t); //$NON-NLS-1$
                throw new RuntimeException(ref.getReference(), t);
            }
        }

        /**
         * Looks up the space's "view_right" sidebar page. Returns null when
         * the page does not exist, cannot be loaded, or the current user may
         * not read it (best effort — any failure simply means no sidebar).
         */
        private RWikiEntity findSideBar(String space)
        {
            RWikiEntity sideBar = null;
            if (exists("view_right", space))
            {
                try
                {
                    RWikiObject rwoSB = getRWikiObject("view_right", space);
                    sideBar = (RWikiEntity) getEntity(rwoSB);
                    if (!wikiSecurityService.checkRead(sideBar))
                    {
                        sideBar = null;
                    }
                }
                catch (Exception ex)
                {
                    sideBar = null;
                }
            }
            return sideBar;
        }

        /**
         * Posts the optional read-tracking event and streams the content.
         * Declared as throwing Exception so that any checked exceptions of
         * outputContent propagate to the caller's catch blocks unchanged.
         */
        private void render(EntityHandler eh, Entity entity, String space,
                Reference ref, HttpServletRequest req, HttpServletResponse res)
                throws Exception
        {
            RWikiEntity sideBar = findSideBar(space);
            if ( trackReads )
                eventTrackingService.post(
                    eventTrackingService.newEvent(
                        RWikiObjectService.EVENT_RESOURCE_READ, ref.getReference(), false,
                        NotificationService.PREF_NONE));
            eh.outputContent(entity, sideBar, req, res);
        }
    };
}
/**
 * See if the reference matches one of the registered regex patterns.
 *
 * @param reference the entity reference to test
 * @return the Entity handler that should be used to generate content and
 *         format the URL, or null when no handler matches
 */
private EntityHandler findEntityReferenceMatch(String reference)
{
    if (!reference.startsWith(REFERENCE_ROOT)) return null;
    // Iterate the handler values directly instead of walking keySet() and
    // re-fetching each handler with get().
    for (Iterator i = m_handlers.values().iterator(); i.hasNext();)
    {
        EntityHandler eh = (EntityHandler) i.next();
        if (eh.matches(reference)) return eh;
    }
    return null;
}
/**
 * Guard: rejects references that do not belong to this service's
 * application id.
 */
private void checkReference(Reference ref)
{
    final boolean isWikiReference = APPLICATION_ID.equals(ref.getType());
    if (!isWikiReference)
    {
        throw new RuntimeException(
                "Wiki page does not exist, sorry "); //$NON-NLS-1$
    }
}
/**
 * Looks up the entity handler based on subtype; the registered subtype
 * must match the key in the m_handlers map.
 *
 * @param ref the reference whose subtype selects the handler
 * @return the matching handler, or null for references of a foreign type
 */
private EntityHandler findEntityHandler(Reference ref)
{
    if (APPLICATION_ID.equals(ref.getType()))
    {
        return (EntityHandler) m_handlers.get(ref.getSubType());
    }
    return null;
}
/**
 * Get the entity, already having looked up the entity handler.
 *
 * Resolves the current revision of the page named by the reference; when
 * the handler requests an explicit historical revision, that revision is
 * served instead (falling back to the current one if it cannot be found).
 * A reference-only entity is returned for pages that do not exist yet.
 *
 * @param ref the reference naming the page
 * @param eh the handler, used to extract the requested revision
 * @return the wiki entity, never null
 */
private Entity getEntity(Reference ref, EntityHandler eh)
{
    RWikiObject rwo = this.getRWikiCurrentObjectDao().findByGlobalName(
            ref.getId());
    // A revision of -1 means "current revision requested".
    int revision = eh.getRevision(ref);
    if (rwo != null && revision != -1
            && revision != rwo.getRevision().intValue())
    {
        // A specific historical revision was asked for.
        RWikiObject hrwo = this.getRWikiHistoryObjectDao()
                .getRWikiHistoryObject(rwo, revision);
        if (hrwo != null)
        {
            rwo = hrwo;
        }
    }
    RWikiEntity rwe = null;
    if (rwo == null)
    {
        // The page does not exist: wrap the bare reference instead.
        rwe = (RWikiEntity) getReferenceEntity(ref);
    }
    else
    {
        rwe = (RWikiEntity) getEntity(rwo);
    }
    return rwe;
}
/**
 * Wraps a reference to a (possibly non-existent) page in an entity.
 *
 * @param ref the reference to wrap
 * @return an entity view of the bare reference
 */
public Entity getReferenceEntity(Reference ref)
{
    final Entity wrapped = new RWikiEntityImpl(ref);
    return wrapped;
}
/**
 * {@inheritDoc}
 *
 * @param rwo the wiki object to wrap
 * @return an entity view of the given wiki object
 */
public Entity getEntity(RWikiObject rwo)
{
    final Entity wrapped = new RWikiEntityImpl(rwo);
    return wrapped;
}
/**
 * {@inheritDoc}
 *
 * @param rwo the wiki object
 * @return a Reference pointing at the wiki object's entity
 */
public Reference getReference(RWikiObject rwo)
{
    final String referenceString = getEntity(rwo).getReference();
    return entityManager.newReference(referenceString);
}
/**
 * @return the registered entity handlers, keyed by reference subtype
 */
public Map getHandlers()
{
    return this.m_handlers;
}
/**
 * @param handlers
 *            The handlers to set, keyed by reference subtype.
 */
public void setHandlers(Map handlers)
{
    // Parameter renamed from "m_handlers": the member-field naming style on
    // a parameter was misleading.
    this.m_handlers = handlers;
}
/**
 * Disable the tool from the site.
 *
 * Intentionally a no-op: wiki content is kept when the tool is removed,
 * so re-enabling the tool later restores the site's pages.
 *
 * @param context the site/context id (unused)
 */
private void disableWiki(String context)
{
    // ? we are not going to delete the content, so do nothing
}
/**
 * Enable the tool in the site.
 *
 * Intentionally a no-op: pages are created lazily on first use.
 *
 * @param context the site/context id (unused)
 */
private void enableWiki(String context)
{
    // we could perform pre-populate at this stage
}
/**
 * {@inheritDoc}
 *
 * Delegates straight to the DAO; results are NOT filtered by the current
 * user's permissions (see the TODO below).
 */
public List findAllChangedSince(Date time, String basepath)
{
    // TODO: Put authz in place
    return cdao.findAllChangedSince(time, basepath);
}
/**
 * {@inheritDoc} Wraps the wiki object in an entity and defers to the
 * security service.
 */
public boolean checkRead(RWikiObject rwo)
{
    final RWikiEntity entity = (RWikiEntity) getEntity(rwo);
    return wikiSecurityService.checkRead(entity);
}
/**
 * {@inheritDoc} Wraps the wiki object in an entity and defers to the
 * security service.
 */
public boolean checkUpdate(RWikiObject rwo)
{
    final RWikiEntity entity = (RWikiEntity) getEntity(rwo);
    return wikiSecurityService.checkUpdate(entity);
}
/**
 * {@inheritDoc} Wraps the wiki object in an entity and defers to the
 * security service.
 */
public boolean checkAdmin(RWikiObject rwo)
{
    final RWikiEntity entity = (RWikiEntity) getEntity(rwo);
    return wikiSecurityService.checkAdmin(entity);
}
/**
 * {@inheritDoc} Delegates to the current-object DAO.
 */
public List findAllPageNames()
{
    final List pageNames = cdao.findAllPageNames();
    return pageNames;
}
/**
 * {@inheritDoc} Builds the canonical entity reference for a page: the
 * wiki reference root, the global page name, and a terminating dot.
 */
public String createReference(String pageName)
{
    final String terminator = "."; //$NON-NLS-1$
    return RWikiObjectService.REFERENCE_ROOT + pageName + terminator;
}
/**
 * {@inheritDoc} Creates a fresh link renderer bound to the given space.
 */
public PageLinkRenderer getComponentPageLinkRender(String pageSpace, boolean withBreadCrumb)
{
    final PageLinkRenderer renderer =
            new ComponentPageLinkRenderImpl(pageSpace, withBreadCrumb);
    return renderer;
}
/**
 * @return the alias service used to resolve site aliases
 */
public AliasService getAliasService()
{
    return this.aliasService;
}
/**
 * @param aliasService the alias service to use for resolving site aliases
 *            (injected by the component manager)
 */
public void setAliasService(AliasService aliasService)
{
    this.aliasService = aliasService;
}
/**
 * {@inheritDoc}
 *
 * Copies wiki pages between two contexts. The cleanup flag is accepted
 * for interface compatibility but currently ignored: no pages are removed
 * from the target context before the copy. The previous implementation
 * compared {@code cleanup == true} and wrapped the empty TODO block in a
 * try/catch that could never be reached; both have been simplified with
 * no behavioral change.
 */
public void transferCopyEntities(String fromContext, String toContext, List ids, boolean cleanup)
{
    if (cleanup)
    {
        // TODO: remove existing pages from toContext before copying
    }
    transferCopyEntities(fromContext, toContext, ids);
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2011 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.jdt.internal.corext.refactoring.structure.constraints;
import org.eclipse.core.runtime.Assert;
import org.eclipse.jdt.internal.corext.refactoring.typeconstraints.types.HierarchyType;
import org.eclipse.jdt.internal.corext.refactoring.typeconstraints.types.TType;
import org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet;
/**
 * Optimized type sets for supertype constraint problems.
 * <p>
 * A set is represented by one of four specialized shapes — empty,
 * universe, singleton {type}, or a {subType, superType} "tuple" — and the
 * central operation is {@link ITypeSet#restrictedTo(ITypeSet)}, which
 * intersects two sets under the supertype-constraint rules.
 */
public abstract class SuperTypeSet implements ITypeSet {

    /** Implementation of an empty set */
    private static class SuperTypeEmptySet extends SuperTypeSet {

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#chooseSingleType()
         */
        public final TType chooseSingleType() {
            // The empty set contains no type to choose.
            return null;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#isEmpty()
         */
        public final boolean isEmpty() {
            return true;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#restrictedTo(org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet)
         */
        public final ITypeSet restrictedTo(final ITypeSet set) {
            // The empty set is the absorbing element of intersection.
            return this;
        }

        /*
         * @see java.lang.Object#toString()
         */
        @Override
        public final String toString() {
            return "EMPTY"; //$NON-NLS-1$
        }
    }

    /** Implementation of a singleton */
    private static class SuperTypeSingletonSet extends SuperTypeSet {

        /** The type */
        private final TType fType;

        /**
         * Creates a new super type singleton set.
         *
         * @param type the type
         */
        private SuperTypeSingletonSet(final TType type) {
            fType= type;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#chooseSingleType()
         */
        public final TType chooseSingleType() {
            return fType;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#isEmpty()
         */
        public final boolean isEmpty() {
            return false;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#restrictedTo(org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet)
         */
        public final ITypeSet restrictedTo(final ITypeSet set) {
            final TType leftErasure= fType.getErasure();
            if (set instanceof SuperTypeUniverse) {
                // Intersection with the universe is a no-op.
                return this;
            } else if (set instanceof SuperTypeSingletonSet) {
                // Identical sets intersect to themselves.
                if (this == set)
                    return this;
                // The null type is compatible with any reference type.
                if (fType.isNullType())
                    return this;
                final SuperTypeSingletonSet singleton= (SuperTypeSingletonSet) set;
                final TType rightErasure= singleton.fType.getErasure();
                // For generic hierarchy types, compare erasures via the
                // subtype relation rather than raw assignability.
                if (leftErasure.isHierarchyType() && rightErasure.isHierarchyType()) {
                    if (leftErasure.isGenericType() || rightErasure.isGenericType()) {
                        if (rightErasure.equals(leftErasure) || ((HierarchyType) leftErasure).isSubType((HierarchyType) rightErasure))
                            return this;
                    }
                }
                // java.lang.Object contains every type.
                if (rightErasure.isJavaLangObject())
                    return this;
                if (leftErasure.canAssignTo(rightErasure))
                    return this;
                // The two types are unrelated: the intersection is empty.
                return SuperTypeSet.getEmpty();
            } else if (set instanceof SuperTypeTuple) {
                if (fType.isNullType())
                    return this;
                // Same compatibility checks, but against the tuple's
                // supertype member.
                final SuperTypeTuple tuple= (SuperTypeTuple) set;
                final TType rightErasure= tuple.fSuperType.getErasure();
                if (leftErasure.isHierarchyType() && rightErasure.isHierarchyType()) {
                    if (leftErasure.isGenericType() || rightErasure.isGenericType()) {
                        if (rightErasure.equals(leftErasure) || ((HierarchyType) leftErasure).isSubType((HierarchyType) rightErasure))
                            return this;
                    }
                }
                if (rightErasure.isJavaLangObject())
                    return this;
                if (leftErasure.canAssignTo(rightErasure))
                    return this;
                // Not compatible with the tuple's supertype: fall back to a
                // singleton of the tuple's subtype.
                return SuperTypeSet.createTypeSet(tuple.fSubType);
            } else if (set instanceof SuperTypeEmptySet) {
                return set;
            } else
                Assert.isTrue(false);
            // Unreachable: all set shapes are handled above.
            return null;
        }

        /*
         * @see java.lang.Object#toString()
         */
        @Override
        public final String toString() {
            return fType.getPrettySignature();
        }
    }

    /** Implementation of a tuple */
    private static class SuperTypeTuple extends SuperTypeSet {

        /** The other type */
        private final TType fSubType;

        /** The super type */
        private final TType fSuperType;

        /**
         * Creates a new super type tuple.
         *
         * @param subType the sub type
         * @param superType the super type
         */
        private SuperTypeTuple(final TType subType, final TType superType) {
            fSubType= subType;
            fSuperType= superType;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#chooseSingleType()
         */
        public final TType chooseSingleType() {
            // Prefer the supertype: choosing it realizes the generalization.
            return fSuperType;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#isEmpty()
         */
        public final boolean isEmpty() {
            return false;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#restrictedTo(org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet)
         */
        public final ITypeSet restrictedTo(final ITypeSet set) {
            if (set instanceof SuperTypeUniverse) {
                return this;
            } else if (set instanceof SuperTypeSingletonSet) {
                final SuperTypeSingletonSet singleton= (SuperTypeSingletonSet) set;
                final TType rightErasure= singleton.fType.getErasure();
                final TType subErasure= fSubType.getErasure();
                final TType superErasure= fSuperType.getErasure();
                // The tuple survives only if BOTH of its members are
                // compatible with the singleton's type.
                if (subErasure.isHierarchyType() && superErasure.isHierarchyType() && rightErasure.isHierarchyType()) {
                    if (subErasure.isGenericType() || superErasure.isGenericType() || rightErasure.isGenericType()) {
                        if ((rightErasure.equals(subErasure) || ((HierarchyType) subErasure).isSubType((HierarchyType) rightErasure))
                            && (rightErasure.equals(superErasure) || ((HierarchyType) superErasure).isSubType((HierarchyType) rightErasure)))
                            return this;
                    }
                }
                if (rightErasure.isJavaLangObject())
                    return this;
                if (subErasure.canAssignTo(rightErasure) && superErasure.canAssignTo(rightErasure))
                    return this;
                // Otherwise the generalization is not possible here: collapse
                // to a singleton of the subtype.
                return SuperTypeSet.createTypeSet(fSubType);
            } else if (set instanceof SuperTypeTuple) {
                // NOTE(review): two tuples intersect to the receiver —
                // presumably tuples arising in one constraint system always
                // describe the same sub/super pair; confirm.
                return this;
            } else if (set instanceof SuperTypeEmptySet) {
                return set;
            } else
                Assert.isTrue(false);
            // Unreachable: all set shapes are handled above.
            return null;
        }

        /*
         * @see java.lang.Object#toString()
         */
        @Override
        public final String toString() {
            return "[" + fSubType.getPrettySignature() + ", " + fSuperType.getPrettySignature() + "]"; //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        }
    }

    /** Implementation of the type universe */
    private static class SuperTypeUniverse extends SuperTypeSet {

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#chooseSingleType()
         */
        public final TType chooseSingleType() {
            // No single type characterizes the universe.
            return null;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#isEmpty()
         */
        public final boolean isEmpty() {
            return false;
        }

        /*
         * @see org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet#restrictedTo(org.eclipse.jdt.internal.corext.refactoring.typeconstraints2.ITypeSet)
         */
        public final ITypeSet restrictedTo(final ITypeSet set) {
            // The universe is the identity element of intersection.
            return set;
        }

        /*
         * @see java.lang.Object#toString()
         */
        @Override
        public final String toString() {
            return "UNIVERSE"; //$NON-NLS-1$
        }
    }

    /** The empty set (shared: all empty sets are interchangeable) */
    private static final ITypeSet fgEmpty= new SuperTypeEmptySet();

    /** The universe (shared: all universes are interchangeable) */
    private static final ITypeSet fgUniverse= new SuperTypeUniverse();

    /**
     * Creates a new type set.
     *
     * @param type the type to contain, or <code>null</code>
     * @return the type set, or the universe if <code>type</code> is <code>null</code>
     */
    public static ITypeSet createTypeSet(final TType type) {
        if (type == null)
            return fgUniverse;
        return new SuperTypeSingletonSet(type);
    }

    /**
     * Creates a new type set.
     *
     * @param subType the sub type
     * @param superType the super type
     * @return the type set, or the universe if either type is <code>null</code>
     */
    public static ITypeSet createTypeSet(final TType subType, final TType superType) {
        if (subType == null || superType == null)
            return fgUniverse;
        return new SuperTypeTuple(subType, superType);
    }

    /**
     * Returns the empty set.
     *
     * @return the empty set
     */
    public static ITypeSet getEmpty() {
        return fgEmpty;
    }

    /**
     * Returns the universe set.
     *
     * @return the universe set
     */
    public static ITypeSet getUniverse() {
        return fgUniverse;
    }
}
| |
package ru.spoddubnyak;
import java.util.Iterator;
/**
 * Class SimpleLinkedList describes the actions of the container on the basis of
 * a bidirectional (doubly linked) list with head and tail sentinel elements.
 *
 * @param <E> This describes my type parameter
 * @author Sergei Poddubnyak (forvvard09@gmail.com)
 * @version 1.1
 * @since 11.07.2017
 */
public class SimpleLinkedList<E> implements SimpleContainer<E> {
    /**
     * Head sentinel: placed before the first real element.
     */
    private Element<E> firstElement;
    /**
     * Tail sentinel: placed after the last real element.
     */
    private Element<E> lastElement;
    /**
     * property size collection, number of items in the SimpleLinkedList.
     */
    private int size = 0;

    /**
     * Getter for first element.
     * @return link to the head sentinel element
     */
    public Element<E> getFirstElement() {
        return this.firstElement;
    }

    /**
     * Constructor it creates a new object SimpleLinkedList container with the
     * specified values: two sentinels linked to each other, so that add and
     * remove need no special cases for an empty list.
     */
    SimpleLinkedList() {
        this.lastElement = new Element<E>();
        this.firstElement = new Element<E>();
        this.firstElement.nextElement = this.lastElement;
        this.firstElement.previusElement = this.lastElement;
        this.lastElement.previusElement = this.firstElement;
        this.lastElement.nextElement = this.firstElement;
    }

    /**
     * Getter for property size.
     * @return property size, number of items in the SimpleLinkedList
     */
    public int getSize() {
        return this.size;
    }

    /**
     * Method removeLastElement removes the last element in the collection.
     * @throws IndexOutOfBoundsException if there are no elements to remove
     */
    public void removeLastElement() throws IndexOutOfBoundsException {
        if (0 == getSize()) {
            throw new IndexOutOfBoundsException("There are no objects to delete in the collection.");
        }
        Element<E> removed = this.lastElement.previusElement;
        Element<E> newLast = removed.previusElement;
        // Fully unlink the removed node so it cannot keep its neighbours alive.
        removed.previusElement = null;
        removed.nextElement = null;
        newLast.nextElement = this.lastElement;
        this.lastElement.previusElement = newLast;
        this.size--;
    }

    /**
     * Method removeFirstElement removes the first element in the collection.
     * @throws IndexOutOfBoundsException if there are no elements to remove
     */
    public void removeFirstElement() throws IndexOutOfBoundsException {
        if (0 == getSize()) {
            throw new IndexOutOfBoundsException("There are no objects to delete in the collection.");
        }
        Element<E> removed = this.firstElement.nextElement;
        Element<E> newFirst = removed.nextElement;
        // Fully unlink the removed node so it cannot keep its neighbours alive.
        removed.previusElement = null;
        removed.nextElement = null;
        newFirst.previusElement = this.firstElement;
        this.firstElement.nextElement = newFirst;
        this.size--;
    }

    @Override
    public void add(E value) {
        // Insert the new node just before the tail sentinel.
        Element<E> newElement = new Element<E>(value, lastElement.previusElement, lastElement);
        lastElement.previusElement.nextElement = newElement;
        lastElement.previusElement = newElement;
        this.size++;
    }

    @Override
    public E get(int index) {
        // Reject negative indexes too: previously get(-1) silently returned
        // the first element instead of failing.
        if (index < 0 || getSize() <= index) {
            throw new IndexOutOfBoundsException("Went beyond the collection.");
        }
        Element<E> resultElement = this.firstElement.nextElement;
        for (int i = 0; i < index; i++) {
            resultElement = resultElement.nextElement;
        }
        return resultElement.getValueElement();
    }

    @Override
    public Iterator<E> iterator() {
        return new MyLinkedListIterator<E>();
    }

    /**
     * Inner private Class to create a doubly-linked list of elements.
     *
     * @param <E> This describes my type parameter
     */
    public class Element<E> {
        /**
         * the value stored in this node.
         */
        private E valueElement;
        /**
         * link to the previous node in the list.
         */
        private Element<E> previusElement;
        /**
         * link to the next node in the list.
         */
        private Element<E> nextElement;

        /**
         * Constructor it creates a new object Element with the specified.
         *
         * @param value value for Element
         * @param prev link of previous Element
         * @param next link of next Element
         */
        Element(E value, Element<E> prev, Element<E> next) {
            this.valueElement = value;
            this.previusElement = prev;
            this.nextElement = next;
        }

        /**
         * Constructor it creates a new empty Element (used for sentinels).
         */
        Element() {
            this.valueElement = null;
            this.previusElement = null;
            this.nextElement = null;
        }

        /**
         * Getter for property valueElement.
         * @return value Element
         */
        public E getValueElement() {
            return this.valueElement;
        }

        /**
         * Setter for property valueElement.
         * @param valueElement value for Element
         */
        public void setValueElement(E valueElement) {
            this.valueElement = valueElement;
        }

        /**
         * Getter for link of previous element.
         * @return link of previous element
         */
        public Element<E> getPreviusElement() {
            return previusElement;
        }

        /**
         * Getter for link of next element.
         * @return link of next element
         */
        public Element<E> getNextElement() {
            return nextElement;
        }
    }

    /**
     * Inner Class MyLinkedListIterator implements the iterator by walking the
     * node links directly. The previous implementation called get(index) for
     * every step, making a full traversal O(n^2); this one is O(n).
     *
     * @param <E> This describes my type parameter
     */
    private class MyLinkedListIterator<E> implements Iterator<E> {
        /**
         * the next node to visit; starts at the first real element.
         */
        private Element<?> nextNode = firstElement.getNextElement();
        /**
         * number of elements already returned.
         */
        private int positionIterator = 0;

        @Override
        public boolean hasNext() {
            return this.positionIterator < getSize();
        }

        @Override
        @SuppressWarnings("unchecked")
        public E next() {
            if (!hasNext()) {
                // NOTE(review): the Iterator contract calls for
                // NoSuchElementException here; IndexOutOfBoundsException is
                // kept for backward compatibility with existing callers.
                throw new IndexOutOfBoundsException("Went beyond the collection.");
            }
            E result = (E) this.nextNode.getValueElement();
            this.nextNode = this.nextNode.getNextElement();
            this.positionIterator++;
            return result;
        }
    }
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.actionitem;
import org.kuali.rice.kew.api.KewApiConstants;
import org.kuali.rice.kew.api.actionlist.DisplayParameters;
import org.kuali.rice.kew.api.document.DocumentStatus;
import org.kuali.rice.kew.api.preferences.Preferences;
import org.kuali.rice.kew.routeheader.DocumentRouteHeaderValueActionListExtension;
import org.kuali.rice.kew.service.KEWServiceLocator;
import org.kuali.rice.kew.web.RowStyleable;
import org.kuali.rice.kim.api.group.Group;
import org.kuali.rice.kim.api.identity.principal.EntityNamePrincipalName;
import org.kuali.rice.kim.api.services.KimApiServiceLocator;
import javax.persistence.MappedSuperclass;
import javax.persistence.Transient;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
/**
 * Alternate model object for action list fetches that do not automatically use
 * ojb collections. This is here to make action list faster.
 *
 * <p>Display-oriented values (group name, delegator name, last approved
 * date) are resolved lazily on first access and cached, guarded by the
 * corresponding {@code *Initialized} flags, so a large action list does
 * not pay for lookups the user's preferences do not require.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 *
 */
@MappedSuperclass
public class ActionItemActionListExtension extends ActionItem implements RowStyleable {

    private static final long serialVersionUID = -8801104028828059623L;

    // All fields below are presentation-time caches; @Transient keeps them
    // out of the persistence mapping inherited from ActionItem.
    @Transient
    private Timestamp lastApprovedDate;
    @Transient
    private Map<String, String> customActions = new HashMap<String, String>();
    @Transient
    private String rowStyleClass;
    @Transient
    private Integer actionListIndex;
    @Transient
    private String delegatorName = "";
    @Transient
    private String groupName = "";
    @Transient
    private DisplayParameters displayParameters;

    // Guards against repeating the preference-driven initialization work.
    @Transient
    private boolean isInitialized = false;
    @Transient
    private DocumentRouteHeaderValueActionListExtension routeHeader;

    // One-shot flags for the individual lazy lookups below.
    @Transient
    private boolean lastApprovedDateInitialized = false;
    @Transient
    private boolean delegatorNameInitialized = false;
    @Transient
    private boolean groupNameInitialized = false;

    public Integer getActionListIndex() {
        return actionListIndex;
    }

    public void setActionListIndex(Integer actionListIndex) {
        this.actionListIndex = actionListIndex;
    }

    /**
     * Returns the date of the last approval for this item's document,
     * resolving it lazily on first call.
     */
    public Timestamp getLastApprovedDate() {
        initializeLastApprovedDate();
        return this.lastApprovedDate;
    }

    public Map<String, String> getCustomActions() {
        return customActions;
    }

    public void setCustomActions(Map<String, String> customActions) {
        this.customActions = customActions;
    }

    public String getRowStyleClass() {
        return rowStyleClass;
    }

    public void setRowStyleClass(String rowStyleClass) {
        this.rowStyleClass = rowStyleClass;
    }

    /**
     * Returns the delegator's display name, resolving it lazily on first call.
     */
    public String getDelegatorName() {
        initializeDelegatorName();
        return delegatorName;
    }

    /**
     * Returns the workgroup name, resolving it lazily on first call.
     */
    public String getGroupName() {
        initializeGroupName();
        return groupName;
    }

    /**
     * Applies the user's action-list preferences, resolving only the
     * display fields the preferences actually request. Safe to call more
     * than once; only the row style is recomputed on repeat calls.
     *
     * NOTE(review): dereferences this.routeHeader unconditionally — callers
     * presumably set the route header before calling initialize; confirm.
     */
    public void initialize(Preferences preferences) {
        // always re-initialize row style class, just in case they changed a preference!
        initializeRowStyleClass(preferences);
        if (isInitialized) {
            return;
        }
        if (KewApiConstants.PREFERENCES_YES_VAL.equals(preferences.getShowWorkgroupRequest())) {
            initializeGroupName();
        }
        if (KewApiConstants.PREFERENCES_YES_VAL.equals(preferences.getShowDelegator())) {
            initializeDelegatorName();
        }
        if (KewApiConstants.PREFERENCES_YES_VAL.equals(preferences.getShowDateApproved())) {
            initializeLastApprovedDate();
        }
        this.routeHeader.initialize(preferences);
        isInitialized = true;
    }

    /**
     * Maps the document's route status to the user's preferred row color.
     */
    private void initializeRowStyleClass(Preferences preferences) {
        //set background colors for document statuses
        if (KewApiConstants.ROUTE_HEADER_CANCEL_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorCanceled()));
        } else if (KewApiConstants.ROUTE_HEADER_DISAPPROVED_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorDisapproved()));
        } else if (KewApiConstants.ROUTE_HEADER_ENROUTE_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorEnroute()));
        } else if (KewApiConstants.ROUTE_HEADER_EXCEPTION_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorException()));
        } else if (KewApiConstants.ROUTE_HEADER_FINAL_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorFinal()));
        } else if (KewApiConstants.ROUTE_HEADER_INITIATED_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorInitiated()));
        } else if (KewApiConstants.ROUTE_HEADER_PROCESSED_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorProcessed()));
        } else if (KewApiConstants.ROUTE_HEADER_SAVED_CD.equalsIgnoreCase(routeHeader.getDocRouteStatus())) {
            setRowStyleClass(KewApiConstants.ACTION_LIST_COLOR_PALETTE.get(preferences.getColorSaved()));
        }
    }

    /**
     * Looks up and caches the workgroup name (at most once).
     */
    private void initializeGroupName() {
        if (!groupNameInitialized) {
            if (getGroupId() != null) {
                Group group = super.getGroup();
                this.groupName = group.getName();
            }
            groupNameInitialized = true;
        }
    }

    /**
     * Looks up and caches the delegator's display name (at most once).
     * A group delegator takes precedence over a principal delegator when
     * both ids are present.
     */
    private void initializeDelegatorName() {
        if (!delegatorNameInitialized) {
            if (getDelegatorPrincipalId() != null) {
                EntityNamePrincipalName name = KimApiServiceLocator.getIdentityService().getDefaultNamesForPrincipalId(getDelegatorPrincipalId());
                if (name != null) {
                    this.delegatorName = name.getDefaultName().getCompositeName();
                }
            }
            if (getDelegatorGroupId() != null) {
                Group delegatorGroup = KimApiServiceLocator.getGroupService().getGroup(getDelegatorGroupId());
                if (delegatorGroup !=null)
                    delegatorName = delegatorGroup.getName();
            }
            delegatorNameInitialized = true;
        }
    }

    /**
     * Looks up and caches the last approved date (at most once).
     */
    private void initializeLastApprovedDate() {
        if (!lastApprovedDateInitialized) {
            this.lastApprovedDate = KEWServiceLocator.getActionTakenService().getLastApprovedDate(getDocumentId());
            lastApprovedDateInitialized = true;
        }
    }

    public DisplayParameters getDisplayParameters() {
        return displayParameters;
    }

    public void setDisplayParameters(DisplayParameters displayParameters) {
        this.displayParameters = displayParameters;
    }

    public DocumentRouteHeaderValueActionListExtension getRouteHeader() {
        return this.routeHeader;
    }

    public void setRouteHeader(DocumentRouteHeaderValueActionListExtension routeHeader) {
        this.routeHeader = routeHeader;
    }
}
| |
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.doubles;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.booleans.BooleanArrays;
import static it.unimi.dsi.fastutil.HashCommon.arraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Map;
import java.util.NoSuchElementException;
import it.unimi.dsi.fastutil.floats.FloatCollection;
import it.unimi.dsi.fastutil.floats.AbstractFloatCollection;
import it.unimi.dsi.fastutil.floats.FloatIterator;
import it.unimi.dsi.fastutil.objects.AbstractObjectSet;
import it.unimi.dsi.fastutil.objects.ObjectIterator;
/** A type-specific hash map with a fast, small-footprint implementation.
*
* <P>Instances of this class use a hash table to represent a map. The table is
* enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
* smaller (even on a {@link #clear()}). A family of {@linkplain #trim() trimming
* methods} lets you control the size of the table; this is particularly useful
* if you reuse instances of this class.
*
* <p><strong>Warning:</strong> The implementation of this class has significantly
* changed in <code>fastutil</code> 6.1.0. Please read the
* comments about this issue in the section “Faster Hash Tables” of the <a href="../../../../../overview-summary.html">overview</a>.
*
* @see Hash
* @see HashCommon
*/
public class Double2FloatOpenHashMap extends AbstractDouble2FloatMap implements java.io.Serializable, Cloneable, Hash {
 private static final long serialVersionUID = 0L;
 // Enables internal consistency checks (checkTable()) when flipped on in development builds.
 private static final boolean ASSERTS = false;
 /** The array of keys. */
 protected transient double key[];
 /** The array of values. */
 protected transient float value[];
 /** The array telling whether a position is used. */
 protected transient boolean used[];
 /** The acceptable load factor. */
 protected final float f;
 /** The current table size. */
 protected transient int n;
 /** Threshold after which we rehash. It must be the table size times {@link #f}. */
 protected transient int maxFill;
 /** The mask for wrapping a position counter. */
 protected transient int mask;
 /** Number of entries in the set. */
 protected int size;
 /** Cached set of entries. */
 protected transient volatile FastEntrySet entries;
 /** Cached set of keys. */
 protected transient volatile DoubleSet keys;
 /** Cached collection of values. */
 protected transient volatile FloatCollection values;
 /** Creates a new hash map.
	 *
	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
	 *
	 * @param expected the expected number of elements in the hash set.
	 * @param f the load factor.
	 */
 @SuppressWarnings("unchecked")
 public Double2FloatOpenHashMap( final int expected, final float f ) {
  if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
  if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
  this.f = f;
  n = arraySize( expected, f );
  mask = n - 1;
  maxFill = maxFill( n, f );
  key = new double[ n ];
  value = new float[ n ];
  used = new boolean[ n ];
 }
 /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
	 *
	 * @param expected the expected number of elements in the hash map.
	 */
 public Double2FloatOpenHashMap( final int expected ) {
  this( expected, DEFAULT_LOAD_FACTOR );
 }
 /** Creates a new hash map with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} entries
	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
	 */
 public Double2FloatOpenHashMap() {
  this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
 }
 /** Creates a new hash map copying a given one.
	 *
	 * @param m a {@link Map} to be copied into the new hash map.
	 * @param f the load factor.
	 */
 public Double2FloatOpenHashMap( final Map<? extends Double, ? extends Float> m, final float f ) {
  this( m.size(), f );
  putAll( m );
 }
 /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given one.
	 *
	 * @param m a {@link Map} to be copied into the new hash map.
	 */
 public Double2FloatOpenHashMap( final Map<? extends Double, ? extends Float> m ) {
  this( m, DEFAULT_LOAD_FACTOR );
 }
 /** Creates a new hash map copying a given type-specific one.
	 *
	 * @param m a type-specific map to be copied into the new hash map.
	 * @param f the load factor.
	 */
 public Double2FloatOpenHashMap( final Double2FloatMap m, final float f ) {
  this( m.size(), f );
  putAll( m );
 }
 /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor copying a given type-specific one.
	 *
	 * @param m a type-specific map to be copied into the new hash map.
	 */
 public Double2FloatOpenHashMap( final Double2FloatMap m ) {
  this( m, DEFAULT_LOAD_FACTOR );
 }
 /** Creates a new hash map using the elements of two parallel arrays.
	 *
	 * @param k the array of keys of the new hash map.
	 * @param v the array of corresponding values in the new hash map.
	 * @param f the load factor.
	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
	 */
 public Double2FloatOpenHashMap( final double[] k, final float v[], final float f ) {
  this( k.length, f );
  if ( k.length != v.length ) throw new IllegalArgumentException( "The key array and the value array have different lengths (" + k.length + " and " + v.length + ")" );
  for( int i = 0; i < k.length; i++ ) this.put( k[ i ], v[ i ] );
 }
 /** Creates a new hash map with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using the elements of two parallel arrays.
	 *
	 * @param k the array of keys of the new hash map.
	 * @param v the array of corresponding values in the new hash map.
	 * @throws IllegalArgumentException if <code>k</code> and <code>v</code> have different lengths.
	 */
 public Double2FloatOpenHashMap( final double[] k, final float v[] ) {
  this( k, v, DEFAULT_LOAD_FACTOR );
 }
 /*
	 * The following methods implements some basic building blocks used by
	 * all accessors. They are (and should be maintained) identical to those used in OpenHashSet.drv.
	 */
 public float put(final double k, final float v) {
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) {
    final float oldValue = value[ pos ];
    value[ pos ] = v;
    return oldValue;
   }
   pos = ( pos + 1 ) & mask;
  }
  used[ pos ] = true;
  key[ pos ] = k;
  value[ pos ] = v;
  if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
  if ( ASSERTS ) checkTable();
  return defRetValue;
 }
 public Float put( final Double ok, final Float ov ) {
  final float v = ((ov).floatValue());
  final double k = ((ok).doubleValue());
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) {
    final Float oldValue = (Float.valueOf(value[ pos ]));
    value[ pos ] = v;
    return oldValue;
   }
   pos = ( pos + 1 ) & mask;
  }
  used[ pos ] = true;
  key[ pos ] = k;
  value[ pos ] = v;
  if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
  if ( ASSERTS ) checkTable();
  return (null);
 }
 /** Adds an increment to value currently associated with a key.
	 *
	 * @param k the key.
	 * @param incr the increment.
	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
	 * @deprecated use <code>addTo()</code> instead; having the same name of a {@link java.util.Set} method turned out to be a recipe for disaster.
	 */
 @Deprecated
 public float add(final double k, final float incr) {
  return addTo( k, incr );
 }
 /** Adds an increment to value currently associated with a key.
	 *
	 * <P>Note that this method respects the {@linkplain #defaultReturnValue() default return value} semantics: when
	 * called with a key that does not currently appears in the map, the key
	 * will be associated with the default return value plus
	 * the given increment.
	 *
	 * @param k the key.
	 * @param incr the increment.
	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
	 */
 public float addTo(final double k, final float incr) {
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) {
    final float oldValue = value[ pos ];
    value[ pos ] += incr;
    return oldValue;
   }
   pos = ( pos + 1 ) & mask;
  }
  used[ pos ] = true;
  key[ pos ] = k;
  value[ pos ] = defRetValue + incr;
  if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
  if ( ASSERTS ) checkTable();
  return defRetValue;
 }
 /** Shifts left entries with the specified hash code, starting at the specified position,
	 * and empties the resulting free entry.
	 *
	 * @param pos a starting position.
	 * @return the position cleared by the shifting process.
	 */
 protected final int shiftKeys( int pos ) {
  // Shift entries with the same hash.
  int last, slot;
  for(;;) {
   pos = ( ( last = pos ) + 1 ) & mask;
   while( used[ pos ] ) {
    slot = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(key[ pos ])) & mask;
    // Stop scanning when the entry at pos cannot be moved back to last without
    // breaking its probe sequence (condition accounts for wrap-around of the table).
    if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
    pos = ( pos + 1 ) & mask;
   }
   if ( ! used[ pos ] ) break;
   key[ last ] = key[ pos ];
   value[ last ] = value[ pos ];
  }
  used[ last ] = false;
  return last;
 }
 @SuppressWarnings("unchecked")
 public float remove( final double k ) {
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) {
    size--;
    final float v = value[ pos ];
    shiftKeys( pos );
    return v;
   }
   pos = ( pos + 1 ) & mask;
  }
  return defRetValue;
 }
 @SuppressWarnings("unchecked")
 public Float remove( final Object ok ) {
  final double k = ((((Double)(ok)).doubleValue()));
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) {
    size--;
    final float v = value[ pos ];
    shiftKeys( pos );
    return (Float.valueOf(v));
   }
   pos = ( pos + 1 ) & mask;
  }
  return (null);
 }
 public Float get( final Double ok ) {
  final double k = ((ok).doubleValue());
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits( k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == ( k) ) ) return (Float.valueOf(value[ pos ]));
   pos = ( pos + 1 ) & mask;
  }
  return (null);
 }
 @SuppressWarnings("unchecked")
 public float get( final double k ) {
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) return value[ pos ];
   pos = ( pos + 1 ) & mask;
  }
  return defRetValue;
 }
 @SuppressWarnings("unchecked")
 public boolean containsKey( final double k ) {
  // The starting point.
  int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
  // There's always an unused entry.
  while( used[ pos ] ) {
   if ( ( (key[ pos ]) == (k) ) ) return true;
   pos = ( pos + 1 ) & mask;
  }
  return false;
 }
 public boolean containsValue( final float v ) {
  final float value[] = this.value;
  final boolean used[] = this.used;
  // Linear scan: value lookup is O(n) by design.
  for( int i = n; i-- != 0; ) if ( used[ i ] && ( (value[ i ]) == (v) ) ) return true;
  return false;
 }
 /* Removes all elements from this map.
	 *
	 * <P>To increase object reuse, this method does not change the table size.
	 * If you want to reduce the table size, you must use {@link #trim()}.
	 *
	 */
 public void clear() {
  if ( size == 0 ) return;
  size = 0;
  BooleanArrays.fill( used, false );
  // Keys and values are primitives, so there are no object references to null out.
 }
 public int size() {
  return size;
 }
 public boolean isEmpty() {
  return size == 0;
 }
 /** A no-op for backward compatibility.
	 *
	 * @param growthFactor unused.
	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
	 */
 @Deprecated
 public void growthFactor( int growthFactor ) {}
 /** Gets the growth factor.
	 *
	 * @return 16, for backward compatibility with pre-6.1.0 releases; since
	 * <code>fastutil</code> 6.1.0 tables are simply doubled when they are too full,
	 * so this value is meaningless.
	 * @see #growthFactor(int)
	 * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
	 */
 @Deprecated
 public int growthFactor() {
  return 16;
 }
 /** The entry class for a hash map does not record key and value, but
	 * rather the position in the hash table of the corresponding entry. This
	 * is necessary so that calls to {@link java.util.Map.Entry#setValue(Object)} are reflected in
	 * the map */
 private final class MapEntry implements Double2FloatMap.Entry , Map.Entry<Double, Float> {
  // The table index this entry refers to, or -1 if this entry has been deleted.
  private int index;
  MapEntry( final int index ) {
   this.index = index;
  }
  public Double getKey() {
   return (Double.valueOf(key[ index ]));
  }
  public double getDoubleKey() {
   return key[ index ];
  }
  public Float getValue() {
   return (Float.valueOf(value[ index ]));
  }
  public float getFloatValue() {
   return value[ index ];
  }
  public float setValue( final float v ) {
   final float oldValue = value[ index ];
   value[ index ] = v;
   return oldValue;
  }
  public Float setValue( final Float v ) {
   return (Float.valueOf(setValue( ((v).floatValue()) )));
  }
  @SuppressWarnings("unchecked")
  public boolean equals( final Object o ) {
   if (!(o instanceof Map.Entry)) return false;
   Map.Entry<Double, Float> e = (Map.Entry<Double, Float>)o;
   return ( (key[ index ]) == (((e.getKey()).doubleValue())) ) && ( (value[ index ]) == (((e.getValue()).floatValue())) );
  }
  public int hashCode() {
   return it.unimi.dsi.fastutil.HashCommon.double2int(key[ index ]) ^ it.unimi.dsi.fastutil.HashCommon.float2int(value[ index ]);
  }
  public String toString() {
   return key[ index ] + "=>" + value[ index ];
  }
 }
 /** An iterator over a hash map. */
 private class MapIterator {
  /** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be
			returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
  int pos = Double2FloatOpenHashMap.this.n;
  /** The index of the last entry that has been returned. It is -1 if either
			we did not return an entry yet, or the last returned entry has been removed. */
  int last = -1;
  /** A downward counter measuring how many entries must still be returned. */
  int c = size;
  /** A lazily allocated list containing the keys of elements that have wrapped around the table because of removals; such elements
			would not be enumerated (other elements would be usually enumerated twice in their place). */
  DoubleArrayList wrapped;
  {
   final boolean used[] = Double2FloatOpenHashMap.this.used;
   if ( c != 0 ) while( ! used[ --pos ] );
  }
  public boolean hasNext() {
   return c != 0;
  }
  public int nextEntry() {
   if ( ! hasNext() ) throw new NoSuchElementException();
   c--;
   // We are just enumerating elements from the wrapped list.
   if ( pos < 0 ) {
    final double k = wrapped.getDouble( - ( last = --pos ) - 2 );
    // The starting point.
    int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
    // There's always an unused entry.
    while( used[ pos ] ) {
     if ( ( (key[ pos ]) == (k) ) ) return pos;
     pos = ( pos + 1 ) & mask;
    }
   }
   last = pos;
   //System.err.println( "Count: " + c );
   if ( c != 0 ) {
    final boolean used[] = Double2FloatOpenHashMap.this.used;
    while ( pos-- != 0 && !used[ pos ] );
    // When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
   }
   return last;
  }
  /** Shifts left entries with the specified hash code, starting at the specified position,
		 * and empties the resulting free entry. If any entry wraps around the table, instantiates
		 * lazily {@link #wrapped} and stores the entry key.
		 *
		 * @param pos a starting position.
		 * @return the position cleared by the shifting process.
		 */
  protected final int shiftKeys( int pos ) {
   // Shift entries with the same hash.
   int last, slot;
   for(;;) {
    pos = ( ( last = pos ) + 1 ) & mask;
    while( used[ pos ] ) {
     slot = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(key[ pos ])) & mask;
     if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
     pos = ( pos + 1 ) & mask;
    }
    if ( ! used[ pos ] ) break;
    if ( pos < last ) {
     // Wrapped entry: remember its key so the iterator can still enumerate it later.
     if ( wrapped == null ) wrapped = new DoubleArrayList ();
     wrapped.add( key[ pos ] );
    }
    key[ last ] = key[ pos ];
    value[ last ] = value[ pos ];
   }
   used[ last ] = false;
   return last;
  }
  @SuppressWarnings("unchecked")
  public void remove() {
   if ( last == -1 ) throw new IllegalStateException();
   if ( pos < -1 ) {
    // We're removing wrapped entries.
    Double2FloatOpenHashMap.this.remove( wrapped.getDouble( - pos - 2 ) );
    last = -1;
    return;
   }
   size--;
   if ( shiftKeys( last ) == pos && c > 0 ) {
    c++;
    nextEntry();
   }
   last = -1; // You can no longer remove this entry.
   if ( ASSERTS ) checkTable();
  }
  public int skip( final int n ) {
   int i = n;
   while( i-- != 0 && hasNext() ) nextEntry();
   return n - i - 1;
  }
 }
 private class EntryIterator extends MapIterator implements ObjectIterator<Double2FloatMap.Entry > {
  private MapEntry entry;
  public Double2FloatMap.Entry next() {
   return entry = new MapEntry( nextEntry() );
  }
  @Override
  public void remove() {
   super.remove();
   entry.index = -1; // You cannot use a deleted entry.
  }
 }
 private class FastEntryIterator extends MapIterator implements ObjectIterator<Double2FloatMap.Entry > {
  final BasicEntry entry = new BasicEntry ( ((double)0), (0) );
  public BasicEntry next() {
   final int e = nextEntry();
   entry.key = key[ e ];
   entry.value = value[ e ];
   return entry;
  }
 }
 private final class MapEntrySet extends AbstractObjectSet<Double2FloatMap.Entry > implements FastEntrySet {
  public ObjectIterator<Double2FloatMap.Entry > iterator() {
   return new EntryIterator();
  }
  public ObjectIterator<Double2FloatMap.Entry > fastIterator() {
   return new FastEntryIterator();
  }
  @SuppressWarnings("unchecked")
  public boolean contains( final Object o ) {
   if ( !( o instanceof Map.Entry ) ) return false;
   final Map.Entry<Double, Float> e = (Map.Entry<Double, Float>)o;
   final double k = ((e.getKey()).doubleValue());
   // The starting point.
   int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
   // There's always an unused entry.
   while( used[ pos ] ) {
    if ( ( (key[ pos ]) == (k) ) ) return ( (value[ pos ]) == (((e.getValue()).floatValue())) );
    pos = ( pos + 1 ) & mask;
   }
   return false;
  }
  @SuppressWarnings("unchecked")
  public boolean remove( final Object o ) {
   if ( !( o instanceof Map.Entry ) ) return false;
   final Map.Entry<Double, Float> e = (Map.Entry<Double, Float>)o;
   final double k = ((e.getKey()).doubleValue());
   // The starting point.
   int pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
   // There's always an unused entry.
   while( used[ pos ] ) {
    if ( ( (key[ pos ]) == (k) ) ) {
     // Per the Set/Map.entrySet() contract we may remove the mapping only if
     // the whole entry matches, i.e., the value matches too (mirrors contains()).
     if ( ( (value[ pos ]) == (((e.getValue()).floatValue())) ) ) {
      Double2FloatOpenHashMap.this.remove( e.getKey() );
      return true;
     }
     return false;
    }
    pos = ( pos + 1 ) & mask;
   }
   return false;
  }
  public int size() {
   return size;
  }
  public void clear() {
   Double2FloatOpenHashMap.this.clear();
  }
 }
 public FastEntrySet double2FloatEntrySet() {
  if ( entries == null ) entries = new MapEntrySet();
  return entries;
 }
 /** An iterator on keys.
	 *
	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
	 * (and possibly their type-specific counterparts) so that they return keys
	 * instead of entries.
	 */
 private final class KeyIterator extends MapIterator implements DoubleIterator {
  public KeyIterator() { super(); }
  public double nextDouble() { return key[ nextEntry() ]; }
  public Double next() { return (Double.valueOf(key[ nextEntry() ])); }
 }
 private final class KeySet extends AbstractDoubleSet {
  public DoubleIterator iterator() {
   return new KeyIterator();
  }
  public int size() {
   return size;
  }
  public boolean contains( double k ) {
   return containsKey( k );
  }
  public boolean remove( double k ) {
   final int oldSize = size;
   Double2FloatOpenHashMap.this.remove( k );
   return size != oldSize;
  }
  public void clear() {
   Double2FloatOpenHashMap.this.clear();
  }
 }
 public DoubleSet keySet() {
  if ( keys == null ) keys = new KeySet();
  return keys;
 }
 /** An iterator on values.
	 *
	 * <P>We simply override the {@link java.util.ListIterator#next()}/{@link java.util.ListIterator#previous()} methods
	 * (and possibly their type-specific counterparts) so that they return values
	 * instead of entries.
	 */
 private final class ValueIterator extends MapIterator implements FloatIterator {
  public ValueIterator() { super(); }
  public float nextFloat() { return value[ nextEntry() ]; }
  public Float next() { return (Float.valueOf(value[ nextEntry() ])); }
 }
 public FloatCollection values() {
  if ( values == null ) values = new AbstractFloatCollection () {
    public FloatIterator iterator() {
     return new ValueIterator();
    }
    public int size() {
     return size;
    }
    public boolean contains( float v ) {
     return containsValue( v );
    }
    public void clear() {
     Double2FloatOpenHashMap.this.clear();
    }
   };
  return values;
 }
 /** A no-op for backward compatibility. The kind of tables implemented by
	 * this class never need rehashing.
	 *
	 * <P>If you need to reduce the table size to fit exactly
	 * this set, use {@link #trim()}.
	 *
	 * @return true.
	 * @see #trim()
	 * @deprecated A no-op.
	 */
 @Deprecated
 public boolean rehash() {
  return true;
 }
 /** Rehashes the map, making the table as small as possible.
	 *
	 * <P>This method rehashes the table to the smallest size satisfying the
	 * load factor. It can be used when the set will not be changed anymore, so
	 * to optimize access speed and size.
	 *
	 * <P>If the table size is already the minimum possible, this method
	 * does nothing.
	 *
	 * @return true if there was enough memory to trim the map.
	 * @see #trim(int)
	 */
 public boolean trim() {
  final int l = arraySize( size, f );
  if ( l >= n ) return true;
  try {
   rehash( l );
  }
  catch(OutOfMemoryError cantDoIt) { return false; }
  return true;
 }
 /** Rehashes this map if the table is too large.
	 *
	 * <P>Let <var>N</var> be the smallest table size that can hold
	 * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
	 * table size is smaller than or equal to <var>N</var>, this method does
	 * nothing. Otherwise, it rehashes this map in a table of size
	 * <var>N</var>.
	 *
	 * <P>This method is useful when reusing maps. {@linkplain #clear() Clearing a
	 * map} leaves the table size untouched. If you are reusing a map
	 * many times, you can call this method with a typical
	 * size to avoid keeping around a very large table just
	 * because of a few large transient maps.
	 *
	 * @param n the threshold for the trimming.
	 * @return true if there was enough memory to trim the map.
	 * @see #trim()
	 */
 public boolean trim( final int n ) {
  final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
  if ( this.n <= l ) return true;
  try {
   rehash( l );
  }
  catch( OutOfMemoryError cantDoIt ) { return false; }
  return true;
 }
 /** Resizes the map.
	 *
	 * <P>This method implements the basic rehashing strategy, and may be
	 * overriden by subclasses implementing different rehashing strategies (e.g.,
	 * disk-based rehashing). However, you should not override this method
	 * unless you understand the internal workings of this class.
	 *
	 * @param newN the new size
	 */
 @SuppressWarnings("unchecked")
 protected void rehash( final int newN ) {
  int i = 0, pos;
  final boolean used[] = this.used;
  double k;
  final double key[] = this.key;
  final float value[] = this.value;
  final int newMask = newN - 1;
  final double newKey[] = new double[ newN ];
  final float newValue[] = new float[newN];
  final boolean newUsed[] = new boolean[ newN ];
  for( int j = size; j-- != 0; ) {
   while( ! used[ i ] ) i++;
   k = key[ i ];
   pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & newMask;
   while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
   newUsed[ pos ] = true;
   newKey[ pos ] = k;
   newValue[ pos ] = value[ i ];
   i++;
  }
  n = newN;
  mask = newMask;
  maxFill = maxFill( n, f );
  this.key = newKey;
  this.value = newValue;
  this.used = newUsed;
 }
 /** Returns a deep copy of this map.
	 *
	 * <P>This method performs a deep copy of this hash map; the data stored in the
	 * map, however, is not cloned. Note that this makes a difference only for object keys.
	 *
	 * @return a deep copy of this map.
	 */
 @SuppressWarnings("unchecked")
 public Double2FloatOpenHashMap clone() {
  Double2FloatOpenHashMap c;
  try {
   c = (Double2FloatOpenHashMap )super.clone();
  }
  catch(CloneNotSupportedException cantHappen) {
   throw new InternalError();
  }
  c.keys = null;
  c.values = null;
  c.entries = null;
  c.key = key.clone();
  c.value = value.clone();
  c.used = used.clone();
  return c;
 }
 /** Returns a hash code for this map.
	 *
	 * This method overrides the generic method provided by the superclass.
	 * Since <code>equals()</code> is not overriden, it is important
	 * that the value returned by this method is the same value as
	 * the one returned by the overriden method.
	 *
	 * @return a hash code for this map.
	 */
 public int hashCode() {
  int h = 0;
  for( int j = size, i = 0, t = 0; j-- != 0; ) {
   while( ! used[ i ] ) i++;
   t = it.unimi.dsi.fastutil.HashCommon.double2int(key[ i ]);
   t ^= it.unimi.dsi.fastutil.HashCommon.float2int(value[ i ]);
   h += t;
   i++;
  }
  return h;
 }
 private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
  final double key[] = this.key;
  final float value[] = this.value;
  final MapIterator i = new MapIterator();
  s.defaultWriteObject();
  for( int j = size, e; j-- != 0; ) {
   e = i.nextEntry();
   s.writeDouble( key[ e ] );
   s.writeFloat( value[ e ] );
  }
 }
 @SuppressWarnings("unchecked")
 private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
  s.defaultReadObject();
  n = arraySize( size, f );
  maxFill = maxFill( n, f );
  mask = n - 1;
  final double key[] = this.key = new double[ n ];
  final float value[] = this.value = new float[ n ];
  final boolean used[] = this.used = new boolean[ n ];
  double k;
  float v;
  for( int i = size, pos = 0; i-- != 0; ) {
   k = s.readDouble();
   v = s.readFloat();
   pos = (int)it.unimi.dsi.fastutil.HashCommon.murmurHash3(Double.doubleToRawLongBits(k)) & mask;
   while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
   used[ pos ] = true;
   key[ pos ] = k;
   value[ pos ] = v;
  }
  if ( ASSERTS ) checkTable();
 }
 private void checkTable() {}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codecommit.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Information about an update to the source branch of a pull request.
* </p>
*
* @see <a
* href="http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PullRequestSourceReferenceUpdatedEventMetadata"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PullRequestSourceReferenceUpdatedEventMetadata implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The name of the repository where the pull request was updated.
* </p>
*/
private String repositoryName;
/**
* <p>
* The full commit ID of the commit in the destination branch that was the tip of the branch at the time the pull
* request was updated.
* </p>
*/
private String beforeCommitId;
/**
* <p>
* The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull request
* was updated.
* </p>
*/
private String afterCommitId;
/**
* <p>
* The commit ID of the most recent commit that the source branch and the destination branch have in common.
* </p>
*/
private String mergeBase;
/**
 * <p>
 * The name of the repository where the pull request was updated.
 * </p>
 *
 * @param repositoryName
 *        The name of the repository where the pull request was updated.
 */
public void setRepositoryName(String repositoryName) {
    this.repositoryName = repositoryName;
}

/**
 * <p>
 * The name of the repository where the pull request was updated.
 * </p>
 *
 * @return The name of the repository where the pull request was updated.
 */
public String getRepositoryName() {
    return repositoryName;
}

/**
 * <p>
 * The name of the repository where the pull request was updated.
 * </p>
 *
 * @param repositoryName
 *        The name of the repository where the pull request was updated.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public PullRequestSourceReferenceUpdatedEventMetadata withRepositoryName(String repositoryName) {
    this.repositoryName = repositoryName;
    return this;
}
/**
 * <p>
 * The full commit ID of the commit in the destination branch that was the tip of the branch at the time the pull
 * request was updated.
 * </p>
 *
 * @param beforeCommitId
 *        The full commit ID of the commit in the destination branch that was the tip of the branch at the time the
 *        pull request was updated.
 */
public void setBeforeCommitId(String beforeCommitId) {
    this.beforeCommitId = beforeCommitId;
}

/**
 * <p>
 * The full commit ID of the commit in the destination branch that was the tip of the branch at the time the pull
 * request was updated.
 * </p>
 *
 * @return The full commit ID of the commit in the destination branch that was the tip of the branch at the time the
 *         pull request was updated.
 */
public String getBeforeCommitId() {
    return beforeCommitId;
}

/**
 * <p>
 * The full commit ID of the commit in the destination branch that was the tip of the branch at the time the pull
 * request was updated.
 * </p>
 *
 * @param beforeCommitId
 *        The full commit ID of the commit in the destination branch that was the tip of the branch at the time the
 *        pull request was updated.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public PullRequestSourceReferenceUpdatedEventMetadata withBeforeCommitId(String beforeCommitId) {
    this.beforeCommitId = beforeCommitId;
    return this;
}
/**
 * <p>
 * The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull request
 * was updated.
 * </p>
 *
 * @param afterCommitId
 *        The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull
 *        request was updated.
 */
public void setAfterCommitId(String afterCommitId) {
    this.afterCommitId = afterCommitId;
}

/**
 * <p>
 * The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull request
 * was updated.
 * </p>
 *
 * @return The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull
 *         request was updated.
 */
public String getAfterCommitId() {
    return afterCommitId;
}

/**
 * <p>
 * The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull request
 * was updated.
 * </p>
 *
 * @param afterCommitId
 *        The full commit ID of the commit in the source branch that was the tip of the branch at the time the pull
 *        request was updated.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public PullRequestSourceReferenceUpdatedEventMetadata withAfterCommitId(String afterCommitId) {
    this.afterCommitId = afterCommitId;
    return this;
}
/**
 * <p>
 * The commit ID of the most recent commit that the source branch and the destination branch have in common.
 * </p>
 *
 * @param mergeBase
 *        The commit ID of the most recent commit that the source branch and the destination branch have in common.
 */
public void setMergeBase(String mergeBase) {
    this.mergeBase = mergeBase;
}

/**
 * <p>
 * The commit ID of the most recent commit that the source branch and the destination branch have in common.
 * </p>
 *
 * @return The commit ID of the most recent commit that the source branch and the destination branch have in common.
 */
public String getMergeBase() {
    return mergeBase;
}

/**
 * <p>
 * The commit ID of the most recent commit that the source branch and the destination branch have in common.
 * </p>
 *
 * @param mergeBase
 *        The commit ID of the most recent commit that the source branch and the destination branch have in common.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public PullRequestSourceReferenceUpdatedEventMetadata withMergeBase(String mergeBase) {
    this.mergeBase = mergeBase;
    return this;
}
/**
 * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
 * redacted from this string using a placeholder value.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Only non-null members are emitted; each member but the last is followed by a comma.
    StringBuilder buf = new StringBuilder("{");
    if (getRepositoryName() != null)
        buf.append("RepositoryName: ").append(getRepositoryName()).append(",");
    if (getBeforeCommitId() != null)
        buf.append("BeforeCommitId: ").append(getBeforeCommitId()).append(",");
    if (getAfterCommitId() != null)
        buf.append("AfterCommitId: ").append(getAfterCommitId()).append(",");
    if (getMergeBase() != null)
        buf.append("MergeBase: ").append(getMergeBase());
    return buf.append("}").toString();
}
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    // instanceof is false for null, so no separate null check is needed.
    if (!(obj instanceof PullRequestSourceReferenceUpdatedEventMetadata))
        return false;
    PullRequestSourceReferenceUpdatedEventMetadata that = (PullRequestSourceReferenceUpdatedEventMetadata) obj;
    // Each member matches when both are null or both are equal.
    if (getRepositoryName() == null ? that.getRepositoryName() != null : !getRepositoryName().equals(that.getRepositoryName()))
        return false;
    if (getBeforeCommitId() == null ? that.getBeforeCommitId() != null : !getBeforeCommitId().equals(that.getBeforeCommitId()))
        return false;
    if (getAfterCommitId() == null ? that.getAfterCommitId() != null : !getAfterCommitId().equals(that.getAfterCommitId()))
        return false;
    if (getMergeBase() == null ? that.getMergeBase() != null : !getMergeBase().equals(that.getMergeBase()))
        return false;
    return true;
}
/**
 * Hash consistent with {@link #equals(Object)}. {@code Objects.hash} performs exactly the same
 * accumulation as the original manual loop (seed 1, multiplier 31, 0 for null elements), so the
 * produced hash values are unchanged.
 */
@Override
public int hashCode() {
    return java.util.Objects.hash(
        getRepositoryName(), getBeforeCommitId(), getAfterCommitId(), getMergeBase());
}
/**
 * Shallow copy via {@link Object#clone()}, narrowed to this type.
 */
@Override
public PullRequestSourceReferenceUpdatedEventMetadata clone() {
    try {
        return (PullRequestSourceReferenceUpdatedEventMetadata) super.clone();
    }
    catch (CloneNotSupportedException e) {
        // Unreachable as long as the class hierarchy implements Cloneable.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
    }
}
/**
 * Serialises this instance through the SDK's generated marshaller. Internal SDK use only.
 *
 * @param protocolMarshaller the marshaller this object's fields are written to
 */
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.codecommit.model.transform.PullRequestSourceReferenceUpdatedEventMetadataMarshaller.getInstance().marshall(this,
protocolMarshaller);
}
}
| |
package rres.knetminer.datasource.ondexlocal;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.lucene.queryparser.classic.ParseException;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.stereotype.Component;
import com.google.common.base.Functions;
import net.sourceforge.ondex.core.ONDEXConcept;
import net.sourceforge.ondex.core.ONDEXGraph;
import rres.knetminer.datasource.api.CountGraphEntities;
import rres.knetminer.datasource.api.CountHitsResponse;
import rres.knetminer.datasource.api.CountLociResponse;
import rres.knetminer.datasource.api.EvidencePathResponse;
import rres.knetminer.datasource.api.GenomeResponse;
import rres.knetminer.datasource.api.GraphSummaryResponse;
import rres.knetminer.datasource.api.KeywordResponse;
import rres.knetminer.datasource.api.KnetSpaceHost;
import rres.knetminer.datasource.api.KnetminerDataSource;
import rres.knetminer.datasource.api.KnetminerRequest;
import rres.knetminer.datasource.api.LatestNetworkStatsResponse;
import rres.knetminer.datasource.api.NetworkResponse;
import rres.knetminer.datasource.api.QtlResponse;
import rres.knetminer.datasource.api.SynonymsResponse;
import rres.knetminer.datasource.ondexlocal.service.OndexServiceProvider;
import rres.knetminer.datasource.ondexlocal.service.SemanticMotifsSearchResult;
import rres.knetminer.datasource.ondexlocal.service.utils.ExportUtils;
import uk.ac.ebi.utils.exceptions.ExceptionUtils;
/**
* A KnetminerDataSource that knows how to load ONDEX indexes into memory and query them. Specific
* instances of this abstract class simply call the constructor with appropriate values for dsName
* (the name of the source, i.e. the 'X' in the /X/Y URL pattern), and the path to the config XML and
* semantic motifs files in the resources package (which this abstract class lacks).
*
* TODO: Although these responses are all JSON, some of the fields include embedded XML, Javascript,
* tab-delimited or other formats within them as strings. In future these should be converted to
* pure JSON.
*
* @author holland
* @author Marco Brandizi Replaced the parameterised constructor + introduced the config harvester, several improvements
*
* Note that the @Component annotation is necessary since Spring 5, it's not recognised as a bean otherwise and
* despite extending a @Component interface.
*
*/
@Component
public class OndexLocalDataSource extends KnetminerDataSource
{
    /**
     * It's initialised without parameters, then it gets everything from the XML config file. This is fetched by
     * {@link ConfigFileHarvester}, which seeks it in {@code WEB-INF/web.xml} (see the aratiny WAR module).
     */
    public OndexLocalDataSource () {
        init ();
    }

    /**
     * Loads the configuration, publishes the data source name and kicks off the asynchronous data initialisation.
     *
     * @throws IllegalStateException if no config file path was set via {@link ConfigFileHarvester}
     * @throws IllegalArgumentException if the config file doesn't define a data source name
     */
    private void init ()
    {
        var configXmlPath = ConfigFileHarvester.getConfigFilePath ();
        if ( configXmlPath == null ) throw new IllegalStateException (
            "OndexLocalDataSource() can only be called if you set " + ConfigFileHarvester.CONFIG_FILE_PATH_PROP
            + ", either as a Java property, a <context-param> in web.xml, or"
            + " a Param in a Tomcat context file (https://serverfault.com/a/126430)"
        );

        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        var dataService = ondexServiceProvider.getDataService ();

        // this pre-loads some properties in advance, so that we have what we need (ie, data source name) to be able
        // to start answering the API URLs
        // This is also quick enough to be done synchronously.
        dataService.loadOptions ( configXmlPath );
        var dsName = dataService.getDataSourceName ();
        if ( dsName == null ) throw new IllegalArgumentException (
            this.getClass ().getSimpleName () + " requires a DataSourceName, either from its extensions or the config file"
        );
        this.setDataSourceNames ( new String[] { dsName } );
        log.info ( "Setting data source '{}'", dsName );

        // Now we load the data asynchronously, so that the JDK and the web container aren't stuck on it.
        // The ondexServiceProvider.getInstance() will return a NotReadyException exception until this isn't finished,
        // that will be forwarded back to the client by any call requiring the OSP.
        ExecutorService asyncRunner = Executors.newSingleThreadExecutor ();
        asyncRunner.submit ( () -> ondexServiceProvider.initData () );
        // FIX: nothing else is ever submitted to this executor, so shut it down now. The already-submitted
        // init task still runs to completion, but afterwards the worker thread is released instead of
        // lingering idle for the whole life of the web app.
        asyncRunner.shutdown ();
        log.info ( "Asynchronous Ondex initialisation started" );
    }

    /**
     * Counts keyword hits: total Lucene documents, documents linked to genes and distinct connected genes.
     */
    public CountHitsResponse countHits(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        SemanticMotifSearchMgr hits = new SemanticMotifSearchMgr(request.getKeyword(), ondexServiceProvider, null);
        CountHitsResponse response = new CountHitsResponse();
        response.setLuceneCount(hits.getLuceneConcepts().size()); // number of Lucene documents
        response.setLuceneLinkedCount(hits.getLuceneDocumentsLinked()); // number of Lucene documents related to genes
        response.setGeneCount(hits.getNumConnectedGenes()); // count unique genes linked to Lucene documents
        return response;
    }

    /**
     * Renders the synonym table for the request's keyword.
     *
     * @throws IllegalArgumentException wrapping any Lucene {@link ParseException} raised by the keyword
     */
    public SynonymsResponse synonyms(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        try
        {
            var ondexServiceProvider = OndexServiceProvider.getInstance ();
            SynonymsResponse response = new SynonymsResponse();
            response.setSynonyms(ondexServiceProvider.getUIService ().renderSynonymTable(request.getKeyword()));
            return response;
        }
        catch (ParseException e)
        {
            IllegalArgumentException wex = ExceptionUtils.buildEx (
                IllegalArgumentException.class,
                e,
                "Error while counting synonyms for \"%s\": %s",
                Optional.ofNullable ( request ).map ( KnetminerRequest::getKeyword ).orElse ( "<null response>" ),
                e.getMessage ()
            );
            log.error ( wex );
            throw wex;
        }
    }

    /**
     * Counts the genes falling into a locus. The keyword is parsed as "chr-start-end"; missing
     * start/end components default to 0.
     */
    public CountLociResponse countLoci(String dsName, KnetminerRequest request) throws IllegalArgumentException {
        String[] loci = request.getKeyword().split("-");
        String chr = loci[0];
        int start = 0, end = 0;
        if (loci.length > 1) {
            start = Integer.parseInt(loci[1]);
        }
        if (loci.length > 2) {
            end = Integer.parseInt(loci[2]);
        }
        log.info("Counting loci "+chr+":"+start+":"+end);
        CountLociResponse response = new CountLociResponse();
        response.setGeneCount (
            OndexServiceProvider.getInstance ()
                .getDataService()
                .getLociGeneCount(chr, start, end)
        );
        return response;
    }

    /** Genome-wide keyword search; delegates to {@link #_keyword(KeywordResponse, KnetminerRequest)}. */
    public GenomeResponse genome(String dsName, KnetminerRequest request) throws IllegalArgumentException {
        GenomeResponse response = new GenomeResponse();
        this._keyword(response, request);
        return response;
    }

    /** QTL-filtered keyword search; delegates to {@link #_keyword(KeywordResponse, KnetminerRequest)}. */
    public QtlResponse qtl(String dsName, KnetminerRequest request) throws IllegalArgumentException {
        QtlResponse response = new QtlResponse();
        this._keyword(response, request);
        return response;
    }

    /**
     * Common implementation behind {@link #genome} and {@link #qtl}: runs the semantic-motif search for
     * the request's keyword (optionally restricted to user genes and/or QTL regions) and fills the response
     * with the chromosome view XML, the gene table and the evidence table.
     */
    private <T extends KeywordResponse> T _keyword(T response, KnetminerRequest request) throws IllegalArgumentException
    {
        // Find genes from the user's gene list
        Set<ONDEXConcept> userGenes = new HashSet<>();
        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        var searchService = ondexServiceProvider.getSearchService ();
        var exportService = ondexServiceProvider.getExportService ();

        if (request.getList() != null && !request.getList().isEmpty()) {
            userGenes.addAll ( searchService.filterGenesByAccessionKeywords( request.getList() ) );
            log.info("Number of user provided genes: " + userGenes.size());
        }
        // Also search Regions - only if no genes provided
        if ( userGenes.isEmpty() && !request.getQtl().isEmpty() ) {
            userGenes.addAll ( searchService.fetchQTLs ( request.getQtl() ) );
        }

        // Genome search
        log.info ( "Processing search mode: {}", response.getClass().getName() );
        SemanticMotifSearchMgr smSearchMgr = new SemanticMotifSearchMgr ( request.getKeyword(), ondexServiceProvider, userGenes );

        Map<ONDEXConcept, Double> candidateGenesMap = Map.of();
        Stream<ONDEXConcept> genesStream = Stream.of ();

        // TODO: remove. response can't be anything else and if you need for its extensions, follow the S-of-SOLID principle,
        // we have already enough mess here.
        // if (response.getClass().equals( GenomeResponse.class ) || response.getClass().equals ( QtlResponse.class ) )
        // {
        log.info ( "Computing response to /genome or /qtl" );
        candidateGenesMap = smSearchMgr.getSortedCandidates();
        Set<ONDEXConcept> candidateGenes = candidateGenesMap.keySet ();
        genesStream = candidateGenes.parallelStream ();
        // When the user supplied genes, they replace the candidates as the stream to score/export.
        if ( !userGenes.isEmpty () )
        {
            log.info ( "Filtering {} user genes from {} candidate gene(s)", userGenes.size (), candidateGenes.size() );
            genesStream = userGenes.parallelStream ();
        } // if userGenes

        if ( response.getClass().equals ( QtlResponse.class ) )
        {
            log.info ( "Filtering QTL(s) for QTL response " );
            Set<ONDEXConcept> genesQTL = searchService.fetchQTLs ( request.getQtl() );
            log.info ( "Keeping {} QTL(s)", genesQTL.size () );
            genesStream = genesStream.filter ( genesQTL::contains );
            // TODO: log.info("Genes after QTL filter: " + genes.size());
        }
        // } // genome & qtl cases // TODO: remove, see above

        final var candidatesProxy = new MutableObject<> ( candidateGenesMap ); // lambdas doesn't want non-finals
        // Genes absent from the candidate map get a 0 score.
        Map<ONDEXConcept, Double> genesMap = genesStream.collect (
            Collectors.toConcurrentMap ( Functions.identity (), gene -> candidatesProxy.getValue ().getOrDefault ( gene, 0d ) )
        );
        candidatesProxy.setValue ( candidateGenesMap = null ); // Free-up memory

        // Genes are expected in order, highest score first
        List<ONDEXConcept> genes = genesMap.keySet ()
            .parallelStream ()
            .sorted ( (g1, g2) -> - Double.compare ( genesMap.get ( g1 ), genesMap.get ( g2 ) ) )
            .collect ( Collectors.toList () );

        if ( genes.isEmpty () ) return response;

        // We have genes, let's use them to build actual output
        //
        // Chromosome view
        //
        String xmlGViewer = "";
        if (ondexServiceProvider.getDataService ().isReferenceGenome () )
        {
            // Generate Annotation file.
            log.debug("1.) API, doing chrome annotation");
            xmlGViewer = exportService.exportGenomapXML (
                this.getApiUrl(), genes, userGenes, request.getQtl(),
                request.getKeyword(), 1000, genesMap
            );
            log.debug("Chrome annotation done");
        }
        else
            log.debug("1.) API, no reference genome for Genomaps annotation, skipping ");

        // Gene table
        //
        // TODO: no idea why geneMap is recalculated here instead of a more proper place, anyway, let's
        // adapt to it
        log.debug("2.) API, doing gene table view");
        var newSearchResult = new SemanticMotifsSearchResult (
            smSearchMgr.getGeneId2RelatedConceptIds (), genesMap
        );
        String geneTable = exportService.exportGeneTable (
            genes, userGenes, request.getQtl(), request.getListMode(), newSearchResult
        );
        log.debug("Gene table done");

        // Evidence table
        //
        log.debug ( "3) API, doing evidence table" );
        String evidenceTable = exportService.exportEvidenceTable (
            request.getKeyword(), smSearchMgr.getLuceneConcepts(), userGenes, request.getQtl()
        );
        log.debug ( "Evidence table done" );
        // documents that actually relate to genes
        int docSize = searchService
            .getMapEvidences2Genes ( smSearchMgr.getLuceneConcepts() )
            .size();

        // Total documents
        int totalDocSize = smSearchMgr.getLuceneConcepts().size();

        // We have annotation and table file
        response.setGViewer ( xmlGViewer );
        response.setGeneTable ( geneTable );
        response.setEvidenceTable ( evidenceTable );
        response.setGeneCount ( genes.size () );
        response.setDocSize ( docSize );
        response.setTotalDocSize ( totalDocSize );

        return response;
    }

    /**
     * Builds the semantic-motif network for the requested genes/QTLs and returns it as JSON.
     */
    public NetworkResponse network(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        Set<ONDEXConcept> genes = new HashSet<>();
        log.info( "network(), searching {} gene(s)", request.getList().size() );

        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        var searchService = ondexServiceProvider.getSearchService ();

        // Search Genes
        if (!request.getList().isEmpty()) {
            genes.addAll(searchService.filterGenesByAccessionKeywords(request.getList()));
        }
        // Search Regions
        if (!request.getQtl().isEmpty()) {
            genes.addAll(searchService.fetchQTLs(request.getQtl()));
        }

        // Find Semantic Motifs
        ONDEXGraph subGraph = ondexServiceProvider.getSemanticMotifService ().findSemanticMotifs(genes, request.getKeyword());

        // Export graph
        var response = new NetworkResponse();
        response.setGraph(ExportUtils.exportGraph2Json(subGraph).getLeft());
        return response;
    }

    /**
     * Returns the evidence-path subgraph connecting one evidence concept (the keyword, parsed as an
     * ONDEX concept ID) to the requested genes, as JSON.
     */
    public EvidencePathResponse evidencePath(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        int evidenceOndexID = Integer.parseInt(request.getKeyword());
        Set<ONDEXConcept> genes = new HashSet<>();

        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        var searchService = ondexServiceProvider.getSearchService ();
        var semanticMotifService = ondexServiceProvider.getSemanticMotifService ();

        // Search Genes
        if (!request.getList().isEmpty()) {
            genes.addAll(searchService.filterGenesByAccessionKeywords(request.getList()));
        }
        ONDEXGraph subGraph = semanticMotifService.findEvidencePaths(evidenceOndexID, genes);

        // Export graph
        var response = new EvidencePathResponse();
        response.setGraph(ExportUtils.exportGraph2Json(subGraph).getLeft ());
        return response;
    }

    /**
     * Returns the content of the dataset's {@code latestNetwork_Stats.tab} file, read from the
     * configured DataPath.
     */
    public LatestNetworkStatsResponse latestNetworkStats(String dsName, KnetminerRequest request) throws IllegalArgumentException {
        LatestNetworkStatsResponse response = new LatestNetworkStatsResponse();
        try {
            var opts = OndexServiceProvider.getInstance ().getDataService ().getOptions ();
            byte[] encoded = Files.readAllBytes(Paths.get(opts.getString("DataPath"), "latestNetwork_Stats.tab"));
            response.stats = new String(encoded, Charset.defaultCharset());
        } catch (IOException ex) {
            // A missing stats file is a deployment problem, not a client error
            log.error(ex);
            throw new Error(ex);
        }
        return response;
    }

    /**
     * Summarises the dataset (version, organisation, species, provider) as a JSON-ish string.
     */
    public GraphSummaryResponse dataSource(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        GraphSummaryResponse response = new GraphSummaryResponse();

        try {
            var ondexServiceProvider = OndexServiceProvider.getInstance ();
            var odxData = ondexServiceProvider.getDataService ();
            // Parse the data into a JSON format & set the graphSummary as is.
            // This data is obtained from the maven-settings.xml
            JSONObject summaryJSON = new JSONObject();
            summaryJSON.put("dbVersion", odxData.getDatasetVersion () );
            summaryJSON.put("sourceOrganization", odxData.getDatasetOrganization ());
            // NOTE(review): each put() overwrites the previous one, so only the LAST tax ID survives.
            // Confirm this is intended when a dataset has multiple tax IDs.
            odxData.getTaxIds ().forEach((taxID) -> {
                summaryJSON.put("speciesTaxid", taxID);
            });
            summaryJSON.put("speciesName", odxData.getSpecies());
            // TODO: initially, this was set with ondexServiceProvider.getCreationDate()
            // which corresponded to the server's starting time.
            // TODO: after discussion, we require this to come from the OXL's last-modified date
            // (and later, from inside the OXL, together with graph metadata
            SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm");
            var timestampStr = formatter.format ( new Date() );
            summaryJSON.put("dbDateCreated", timestampStr);
            summaryJSON.put("provider", odxData.getDatasetProvider () );
            String jsonString = summaryJSON.toString();
            // Removing the pesky double quotes
            jsonString = jsonString.substring(1, jsonString.length() - 1);
            log.info("response.dataSource= " + jsonString); // test
            response.dataSource = jsonString;
        } catch (JSONException ex) {
            log.error(ex);
            throw new Error(ex);
        }
        return response;
    }

    /**
     * Like {@link #network}, but only reports the node/relation counts of the resulting subgraph.
     */
    public CountGraphEntities geneCount(String dsName, KnetminerRequest request) throws IllegalArgumentException
    {
        log.info("geneCount() Search genes " + request.getList().size());
        Set<ONDEXConcept> genes = new HashSet<>();

        var ondexServiceProvider = OndexServiceProvider.getInstance ();
        var searchService = ondexServiceProvider.getSearchService ();

        // Search Genes
        if (!request.getList().isEmpty()) {
            genes.addAll(searchService.filterGenesByAccessionKeywords(request.getList()));
        }
        // Search Regions
        if (!request.getQtl().isEmpty()) {
            genes.addAll(searchService.fetchQTLs(request.getQtl()));
        }

        // Find Semantic Motifs
        ONDEXGraph subGraph =
            ondexServiceProvider.getSemanticMotifService ().findSemanticMotifs(genes, request.getKeyword());
        var response = new CountGraphEntities();
        // Set the graph
        var jsonGraph = ExportUtils.exportGraph2Json(subGraph).getRight ();
        log.info("Set graph, now getting the number of nodes...");
        response.setNodeCount( Integer.toString ( jsonGraph.getConcepts ().size () ) );
        response.setRelationshipCount( Integer.toString ( jsonGraph.getRelations ().size () ) );
        return response;
    }

    /** Returns the configured KnetSpace base URL. */
    public KnetSpaceHost ksHost(String dsName, KnetminerRequest request) throws IllegalArgumentException {
        KnetSpaceHost response = new KnetSpaceHost();
        response.setKsHostUrl(OndexServiceProvider.getInstance ().getDataService ().getKnetSpaceHost ());
        return response;
    }
}
| |
package org.jgroups.tests.perf;
import org.jgroups.*;
import org.jgroups.annotations.Property;
import org.jgroups.blocks.*;
import org.jgroups.protocols.*;
import org.jgroups.protocols.pbcast.GMS;
import org.jgroups.protocols.pbcast.NAKACK2;
import org.jgroups.protocols.pbcast.STABLE;
import org.jgroups.stack.AddressGenerator;
import org.jgroups.stack.NonReflectiveProbeHandler;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import java.net.InetAddress;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.LongAdder;
import static org.jgroups.tests.perf.PerfUtil.*;
/**
* Tests the UNICAST by invoking unicast RPCs between a sender and a receiver. Mimics the DIST mode in Infinispan
*
* @author Bela Ban
*/
public class ProgrammaticUPerf implements Receiver, MethodInvoker {
// ---- cluster plumbing ----
private JChannel channel;
private Address local_addr;
private RpcDispatcher disp;
static final String groupname="uperf";
protected final List<Address> members=new ArrayList<>();
protected volatile View view;
// controls eventLoop(); cleared by 'x' / EOF or stopEventThread()
protected volatile boolean looping=true;
protected Thread event_loop_thread;
// benchmark counters, bumped concurrently by all invoker threads
protected final LongAdder num_reads=new LongAdder();
protected final LongAdder num_writes=new LongAdder();
// ============ configurable properties ==================
@Property protected boolean sync=true, oob=true;
@Property protected int num_threads=100;
@Property protected int time=60; // in seconds
@Property protected int msg_size=1000;
@Property protected int anycast_count=2;
@Property protected double read_percentage=0.8; // 80% reads, 20% writes
@Property protected boolean allow_local_gets=true;
@Property protected boolean print_invokers;
@Property protected boolean print_details;
// ... add your own here, just don't forget to annotate them with @Property
// =======================================================
// RPC method ids, dispatched by invoke(Object, short, Object[])
private static final short START = 0;
private static final short GET = 1;
private static final short PUT = 2;
private static final short GET_CONFIG = 3;
private static final short SET_SYNC = 4;
private static final short SET_OOB = 5;
private static final short SET_NUM_THREADS = 6;
private static final short SET_TIME = 7;
private static final short SET_MSG_SIZE = 8;
private static final short SET_ANYCAST_COUNT = 9;
private static final short SET_READ_PERCENTAGE = 10;
private static final short ALLOW_LOCAL_GETS = 11;
private static final short PRINT_INVOKERS = 12;
private static final short PRINT_DETAILS = 13;
private static final short QUIT_ALL = 14;
// NOTE(review): not referenced in this part of the file — presumably used by the Invoker inner class; confirm
private final AtomicInteger COUNTER=new AtomicInteger(1);
// payload returned by get(); re-allocated to the current msg_size in startTest()
private byte[] BUFFER=new byte[msg_size];
// console menu template rendered by eventLoop()
protected static final String format=
"[1] Start test [2] View [4] Threads (%d) [6] Time (%,ds) [7] Msg size (%s)" +
"\n[s] Sync (%b) [o] OOB (%b)" +
"\n[a] Anycast count (%d) [r] Read percentage (%.2f) " +
"\n[l] local gets (%b) [d] print details (%b) [i] print invokers (%b)" +
"\n[v] Version [x] Exit [X] Exit all\n";
static {
// registers the custom (de)serialisation classes used by the RPCs
PerfUtil.init();
}
// ---- accessors for the configurable properties; also driven remotely via invoke()/applyConfig() ----
public boolean getSync() {return sync;}
public void setSync(boolean s) {this.sync=s;}
public boolean getOOB() {return oob;}
public void setOOB(boolean oob) {this.oob=oob;}
public int getNumThreads() {return num_threads;}
public void setNumThreads(int t) {this.num_threads=t;}
public int getTime() {return time;}
public void setTime(int t) {this.time=t;}
public int getMsgSize() {return msg_size;}
public void setMsgSize(int t) {this.msg_size=t;}
public int getAnycastCount() {return anycast_count;}
public void setAnycastCount(int t) {this.anycast_count=t;}
public double getReadPercentage() {return read_percentage;}
public void setReadPercentage(double r) {this.read_percentage=r;}
public boolean allowLocalGets() {return allow_local_gets;}
public void allowLocalGets(boolean a) {this.allow_local_gets=a;}
public boolean printInvokers() {return print_invokers;}
public void printInvokers(boolean p) {this.print_invokers=p;}
public boolean printDetails() {return print_details;}
public void printDetails(boolean p) {this.print_details=p;}
/**
 * Builds the protocol stack programmatically (UDP/PING or TCP/TCPPING based), connects the channel
 * to the "uperf" cluster and, if other members already exist, fetches the current test
 * configuration from the coordinator.
 *
 * @param name logical channel name
 * @param generator generates the channel's address
 * @param bind_addr address to bind to, or null for the loopback / local host
 * @param udp true builds a UDP-based stack, false a TCP-based one
 * @param initial_hosts TCPPING host list; defaults to this host when null (TCP only)
 * @throws Throwable if stack creation or channel connect fails
 */
public void init(String name, AddressGenerator generator, String bind_addr, int bind_port,
boolean udp, String mcast_addr, int mcast_port,
String initial_hosts) throws Throwable {
InetAddress bind_address=bind_addr != null? Util.getAddress(bind_addr, Util.getIpStackType()) : Util.getLoopback();
// transport and discovery (slots 0 and 1) are filled below depending on udp
Protocol[] prot_stack={
null, // transport
null, // discovery protocol
new MERGE3(),
new FD_SOCK(),
new FD_ALL3(),
new VERIFY_SUSPECT(),
new NAKACK2(),
new UNICAST3(),
new STABLE(),
new GMS().setJoinTimeout(1000),
new UFC(),
new MFC(),
new FRAG4()};
if(udp) {
UDP u=new UDP().setMulticastAddress(InetAddress.getByName(mcast_addr)).setMulticastPort(mcast_port);
u.getDiagnosticsHandler().setMcastAddress(InetAddress.getByName("224.0.75.75")).enableUdp(true);
prot_stack[0]=u;
prot_stack[1]=new PING();
}
else {
if(initial_hosts == null) {
InetAddress host=bind_addr == null? InetAddress.getLocalHost() : Util.getAddress(bind_addr, Util.getIpStackType());
initial_hosts=String.format("%s[%d]", host.getHostAddress(), bind_port);
}
TCP tcp=new TCP();
tcp.getDiagnosticsHandler().enableUdp(false).enableTcp(true);
prot_stack[0]=tcp;
prot_stack[1]=new TCPPING().setInitialHosts2(Util.parseCommaDelimitedHosts(initial_hosts, 2));
}
((TP)prot_stack[0]).setBindAddress(bind_address).setBindPort(bind_port);
channel=new JChannel(prot_stack).addAddressGenerator(generator).setName(name);
TP transport=channel.getProtocolStack().getTransport();
// todo: remove default ProbeHandler for "jmx" and "op"
NonReflectiveProbeHandler h=new NonReflectiveProbeHandler(channel);
transport.registerProbeHandler(h);
h.initialize(channel.getProtocolStack().getProtocols());
// System.out.printf("contents:\n%s\n", h.dump());
disp=new RpcDispatcher(channel, this).setReceiver(this).setMethodInvoker(this);
channel.connect(groupname);
local_addr=channel.getAddress();
// first member: nothing to sync config with
if(members.size() < 2)
return;
// otherwise adopt the coordinator's test configuration
Address coord=members.get(0);
Config config=disp.callRemoteMethod(coord, new CustomCall(GET_CONFIG), new RequestOptions(ResponseMode.GET_ALL, 5000));
if(config != null) {
applyConfig(config);
System.out.println("Fetched config from " + coord + ": " + config + "\n");
}
else
System.err.println("failed to fetch config from " + coord);
}
/** Closes the dispatcher and the channel. */
void stop() {
Util.close(disp, channel);
}
/** Runs the interactive console loop ({@link #eventLoop()}) on a daemon thread. */
protected void startEventThread() {
    Thread runner=new Thread(this::eventLoop, "EventLoop");
    runner.setDaemon(true);
    event_loop_thread=runner;
    runner.start();
}
/** Stops the console loop (interrupting its thread if blocked) and closes the channel. */
protected void stopEventThread() {
Thread tmp=event_loop_thread;
looping=false;
if(tmp != null)
tmp.interrupt(); // wake the loop if it's waiting on console input
Util.close(channel);
}
/** Receiver callback: records the new view and refreshes the cached member list. */
public void viewAccepted(View new_view) {
this.view=new_view;
System.out.println("** view: " + new_view);
members.clear();
members.addAll(new_view.getMembers());
}
/**
 * MethodInvoker dispatch: maps a method id (the {@code START}..{@code QUIT_ALL} constants) to the
 * corresponding callback on the target instance, used instead of reflection for remote calls.
 *
 * @param target the ProgrammaticUPerf instance to invoke on
 * @param method_id one of the short method-id constants of this class
 * @param args per-method argument array (see each case)
 * @return the callback's result, or null for void methods
 * @throws IllegalArgumentException if the method id is unknown
 */
public Object invoke(Object target, short method_id, Object[] args) throws Exception {
ProgrammaticUPerf uperf=(ProgrammaticUPerf)target;
Boolean bool_val;
switch(method_id) {
case START:
return uperf.startTest();
case GET:
Integer key=(Integer)args[0];
return uperf.get(key);
case PUT:
key=(Integer)args[0];
byte[] val=(byte[])args[1];
uperf.put(key, val);
return null;
case GET_CONFIG:
return uperf.getConfig();
case SET_SYNC:
uperf.setSync((Boolean)args[0]);
return null;
case SET_OOB:
bool_val=(Boolean)args[0];
uperf.setOOB(bool_val);
return null;
case SET_NUM_THREADS:
uperf.setNumThreads((Integer)args[0]);
return null;
case SET_TIME:
uperf.setTime((Integer)args[0]);
return null;
case SET_MSG_SIZE:
uperf.setMsgSize((Integer)args[0]);
return null;
case SET_ANYCAST_COUNT:
uperf.setAnycastCount((Integer)args[0]);
return null;
case SET_READ_PERCENTAGE:
uperf.setReadPercentage((Double)args[0]);
return null;
case ALLOW_LOCAL_GETS:
uperf.allowLocalGets((Boolean)args[0]);
return null;
case PRINT_INVOKERS:
uperf.printInvokers((Boolean)args[0]);
return null;
case PRINT_DETAILS:
uperf.printDetails((Boolean)args[0]);
return null;
case QUIT_ALL:
uperf.quitAll();
return null;
default:
throw new IllegalArgumentException("method with id=" + method_id + " not found");
}
}
// =================================== callbacks ======================================
/**
 * Runs the benchmark on this node: starts num_threads Invoker threads gated on a latch, lets them
 * run for {@code time} seconds (printing a progress line at each tenth), then stops them and
 * aggregates the per-invoker get/put statistics.
 *
 * @return the aggregated read/write counts, elapsed time and min/max/avg round-trip stats
 */
public Results startTest() throws Exception {
BUFFER=new byte[msg_size]; // pick up the current msg_size
System.out.printf("running for %d seconds\n", time);
final CountDownLatch latch=new CountDownLatch(1);
num_reads.reset(); num_writes.reset();
Invoker[] invokers=new Invoker[num_threads];
for(int i=0; i < invokers.length; i++) {
invokers[i]=new Invoker(members, latch);
invokers[i].start(); // waits on latch
}
long start=System.currentTimeMillis();
latch.countDown(); // releases all invokers at once
long interval=(long)((time * 1000.0) / 10.0);
for(int i=1; i <= 10; i++) {
Util.sleep(interval);
System.out.printf("%d: %s\n", i, printAverage(start));
}
for(Invoker invoker: invokers)
invoker.cancel();
for(Invoker invoker: invokers)
invoker.join();
long total_time=System.currentTimeMillis() - start;
System.out.println();
// merge the per-invoker averages into cluster-wide get/put stats
AverageMinMax avg_gets=null, avg_puts=null;
for(Invoker invoker: invokers) {
if(print_invokers)
System.out.printf("invoker %s: gets %s puts %s\n", invoker.getId(),
print(invoker.avgGets(), print_details), print(invoker.avgPuts(), print_details));
if(avg_gets == null)
avg_gets=invoker.avgGets();
else
avg_gets.merge(invoker.avgGets());
if(avg_puts == null)
avg_puts=invoker.avgPuts();
else
avg_puts.merge(invoker.avgPuts());
}
if(print_invokers)
System.out.printf("\navg over all invokers: gets %s puts %s\n",
print(avg_gets, print_details), print(avg_puts, print_details));
System.out.printf("\ndone (in %s ms)\n", total_time);
return new Results((int)num_reads.sum(), (int)num_writes.sum(), total_time, avg_gets, avg_puts);
}
/** Remote callback ({@code QUIT_ALL}): shuts this node down. */
public void quitAll() {
System.out.println("-- received quitAll(): shutting down");
stopEventThread();
}
/**
 * Formats the current requests/sec rate plus the read/write counts accumulated since start_time.
 */
protected String printAverage(long start_time) {
    long elapsed_ms=System.currentTimeMillis() - start_time;
    long gets=num_reads.sum();
    long puts=num_writes.sum();
    double rate=(gets + puts) / (elapsed_ms / 1000.0);
    return String.format("%,.0f reqs/sec (%,d reads %,d writes)", rate, gets, puts);
}
/** Remote read: returns the shared payload buffer; the key is ignored (no backing store). */
public byte[] get(@SuppressWarnings("UnusedParameters")int key) {
return BUFFER;
}
/** Remote write: deliberately a no-op — only the RPC round-trip is being measured. */
@SuppressWarnings("UnusedParameters")
public void put(int key, byte[] val) {
}
/** Snapshots all configurable properties into a Config, served to joining members via GET_CONFIG. */
public Config getConfig() {
Config c=new Config();
c.add("sync", sync).add("oob", oob).add("num_threads", num_threads).add("time", time).add("msg_size", msg_size)
.add("anycast_count", anycast_count).add("read_percentage", read_percentage)
.add("allow_local_gets", allow_local_gets).add("print_invokers", print_invokers).add("print_details", print_details);
return c;
}
/**
 * Applies a Config fetched from the coordinator, key by key; the inverse of {@link #getConfig()}.
 *
 * @throws IllegalArgumentException if the config contains an unknown field name
 */
protected void applyConfig(Config config) {
for(Map.Entry<String,Object> e: config.values.entrySet()) {
String name=e.getKey();
Object value=e.getValue();
switch(name) {
case "sync":
setSync((Boolean)value);
break;
case "oob":
setOOB((Boolean)value);
break;
case "num_threads":
setNumThreads((Integer)value);
break;
case "time":
setTime((Integer)value);
break;
case "msg_size":
setMsgSize((Integer)value);
break;
case "anycast_count":
setAnycastCount((Integer)value);
break;
case "read_percentage":
setReadPercentage((Double)value);
break;
case "allow_local_gets":
allowLocalGets((Boolean)value);
break;
case "print_invokers":
printInvokers((Boolean)value);
break;
case "print_details":
printDetails((Boolean)value);
break;
default:
throw new IllegalArgumentException("field with name " + name + " not known");
}
}
}
// ================================= end of callbacks =====================================
/**
 * Interactive console loop: prints the menu, reads one key and either runs the benchmark locally
 * or broadcasts a setting change to the whole cluster via {@link #invoke(short, Object...)}.
 * Exits (and calls {@link #stop()}) on 'x' or EOF; 'X' asks every member to quit.
 */
public void eventLoop() {
while(looping) {
try {
int c=Util.keyPress(String.format(format, num_threads, time, Util.printBytes(msg_size),
sync, oob, anycast_count, read_percentage,
allow_local_gets, print_details, print_invokers));
switch(c) {
case '1':
startBenchmark();
break;
case '2':
printView();
break;
case '4':
invoke(SET_NUM_THREADS, Util.readIntFromStdin("Number of sender threads: "));
break;
case '6':
invoke(SET_TIME, Util.readIntFromStdin("Time (secs): "));
break;
case '7':
invoke(SET_MSG_SIZE, Util.readIntFromStdin("Message size: "));
break;
case 'a':
int tmp=parseAnycastCount();
if(tmp >= 0)
invoke(SET_ANYCAST_COUNT, tmp);
break;
case 'o':
invoke(SET_OOB, !oob);
break;
case 's':
invoke(SET_SYNC, !sync);
break;
case 'r':
double percentage=parseReadPercentage();
if(percentage >= 0)
invoke(SET_READ_PERCENTAGE, percentage);
break;
case 'd':
invoke(PRINT_DETAILS, !print_details);
break;
case 'i':
invoke(PRINT_INVOKERS, !print_invokers);
break;
case 'l':
invoke(ALLOW_LOCAL_GETS, !allow_local_gets);
break;
case 'v':
System.out.printf("Version: %s\n", Version.printVersion());
break;
case 'x':
case -1: // EOF on stdin
looping=false;
break;
case 'X':
try {
// fire-and-forget: members may die before replying, so don't wait for responses
RequestOptions options=new RequestOptions(ResponseMode.GET_NONE, 0)
.flags(Message.Flag.OOB, Message.Flag.DONT_BUNDLE, Message.Flag.NO_FC);
disp.callRemoteMethods(null, new CustomCall(QUIT_ALL), options);
break;
}
catch(Throwable t) {
System.err.println("Calling quitAll() failed: " + t);
}
break;
default:
break;
}
}
catch(Throwable t) {
// keep the console alive on any error (bad input, RPC failure, ...)
t.printStackTrace();
}
}
stop();
}
/**
 * Synchronously invokes the given method id (with args) on all cluster members, including this one.
 */
void invoke(short method_id, Object... args) throws Exception {
MethodCall call=new CustomCall(method_id, args);
disp.callRemoteMethods(null, call, RequestOptions.SYNC());
}
/** Kicks off the benchmark on all cluster nodes */
void startBenchmark() {
RspList<Results> responses=null;
try {
// wait for every node's Results; OOB/unbundled so the START call isn't delayed by data traffic
RequestOptions options=new RequestOptions(ResponseMode.GET_ALL, 0);
options.flags(Message.Flag.OOB, Message.Flag.DONT_BUNDLE, Message.Flag.NO_FC);
responses=disp.callRemoteMethods(null, new CustomCall(START), options);
}
catch(Throwable t) {
System.err.println("starting the benchmark failed: " + t);
return;
}
long total_reqs=0;
long total_time=0;
AverageMinMax avg_gets=null, avg_puts=null;
System.out.println("\n======================= Results: ===========================");
// aggregate per-node results; nodes that failed to reply contribute null
for(Map.Entry<Address,Rsp<Results>> entry: responses.entrySet()) {
Address mbr=entry.getKey();
Rsp<Results> rsp=entry.getValue();
Results result=rsp.getValue();
if(result != null) {
total_reqs+=result.num_gets + result.num_puts;
total_time+=result.total_time;
if(avg_gets == null)
avg_gets=result.avg_gets;
else
avg_gets.merge(result.avg_gets);
if(avg_puts == null)
avg_puts=result.avg_puts;
else
avg_puts.merge(result.avg_puts);
}
System.out.println(mbr + ": " + result);
}
// NOTE(review): if no node returned a result, total_time is 0 and this yields NaN — confirm acceptable
double total_reqs_sec=total_reqs / ( total_time/ 1000.0);
double throughput=total_reqs_sec * BUFFER.length;
System.out.println("\n");
System.out.println(Util.bold(String.format("Throughput: %,.2f reqs/sec/node (%s/sec)\n" +
"Roundtrip: gets %s, puts %s\n",
total_reqs_sec, Util.printBytes(throughput),
print(avg_gets, print_details), print(avg_puts, print_details))));
System.out.println("\n\n");
}
/**
 * Reads a read-percentage from stdin and validates it.
 *
 * @return the percentage in [0 .. 1.0], or -1 if the entered value was out of range
 * @throws Exception if reading from stdin fails
 */
static double parseReadPercentage() throws Exception {
    double tmp=Util.readDoubleFromStdin("Read percentage: ");
    if(tmp < 0 || tmp > 1.0) {
        // was "must be >= 0 or <= 1.0", which every number satisfies; the intent is a closed range
        System.err.println("read percentage must be >= 0 and <= 1.0");
        return -1;
    }
    return tmp;
}
/**
 * Reads an anycast count from stdin and validates it against the current view.
 *
 * @return the count in [0 .. view size], or -1 if the entered value was out of range
 * @throws Exception if reading from stdin fails
 */
int parseAnycastCount() throws Exception {
    int tmp=Util.readIntFromStdin("Anycast count: ");
    View tmp_view=channel.getView();
    // also reject negative values (previously only the upper bound was checked),
    // and print the actual size rather than the whole view
    if(tmp < 0 || tmp > tmp_view.size()) {
        System.err.println("anycast count must be >= 0 and <= the view size (" + tmp_view.size() + ")\n");
        return -1;
    }
    return tmp;
}
/**
 * Prints the local address and the current view, then drains any pending
 * bytes from stdin so they don't feed the event loop as commands.
 */
protected void printView() {
    System.out.printf("\n-- local: %s, view: %s\n", local_addr, view);
    try {
        int pending=System.in.available();
        System.in.skip(pending);
    }
    catch(Exception ignored) {
        // best-effort drain; nothing to do if stdin can't be queried
    }
}
/**
 * Formats round-trip statistics in microseconds (the collected values are in ns).
 *
 * @param avg     the statistics to format; may be null when no results were collected
 * @param details if true, prints min/avg/max; otherwise only the average
 * @return the formatted string, or "n/a" when {@code avg} is null
 */
protected static String print(AverageMinMax avg, boolean details) {
    if(avg == null) // avoid NPE when no member contributed any results
        return "n/a";
    return details? String.format("min/avg/max = %,.2f/%,.2f/%,.2f us",
                                  avg.min() / 1000.0, avg.average() / 1000.0, avg.max() / 1000.0) :
      String.format("avg = %,.2f us", avg.average() / 1000.0);
}
/**
 * Worker thread that issues randomly chosen GET or PUT calls in a tight loop
 * until {@link #cancel()} is called, recording per-call round-trip times (in ns).
 */
private class Invoker extends Thread {
    /** Snapshot of the candidate target members, taken at construction time. */
    private final List<Address>  dests=new ArrayList<>();
    /** All invokers await this latch so they start at the same moment. */
    private final CountDownLatch latch;
    private final AverageMinMax  avg_gets=new AverageMinMax(), avg_puts=new AverageMinMax(); // in ns
    /** Reusable buffer of anycast targets for PUTs; cleared after each call. */
    private final List<Address>  targets=new ArrayList<>(anycast_count);
    private volatile boolean     running=true;

    public Invoker(Collection<Address> dests, CountDownLatch latch) {
        this.latch=latch;
        this.dests.addAll(dests);
        setName("Invoker-" + COUNTER.getAndIncrement());
    }

    public AverageMinMax avgGets() {return avg_gets;}
    public AverageMinMax avgPuts() {return avg_puts;}

    /** Asks the run loop to terminate after the current call completes. */
    public void cancel() {running=false;}

    public void run() {
        Object[] put_args={0, BUFFER};
        Object[] get_args={0};
        MethodCall get_call=new GetCall(GET, get_args);
        MethodCall put_call=new PutCall(PUT, put_args);
        // GETs always wait for all responses; PUTs honour the 'sync' flag
        // (GET_NONE == fire-and-forget). 40000 is the timeout argument.
        RequestOptions get_options=new RequestOptions(ResponseMode.GET_ALL, 40000, false, null);
        RequestOptions put_options=new RequestOptions(sync ? ResponseMode.GET_ALL : ResponseMode.GET_NONE, 40000, true, null);

        if(oob) {
            get_options.flags(Message.Flag.OOB);
            put_options.flags(Message.Flag.OOB);
        }

        try {
            latch.await(); // coordinated start: wait until all invokers are ready
        }
        catch(InterruptedException e) {
            e.printStackTrace();
        }

        while(running) {
            // weighted coin toss: read_percentage chance of a GET, otherwise a PUT
            boolean get=Util.tossWeightedCoin(read_percentage);

            try {
                if(get) { // sync GET
                    Address target=pickTarget();
                    long start=System.nanoTime();
                    if(allow_local_gets && Objects.equals(target, local_addr))
                        get(1); // shortcut: serve the read locally, no RPC
                    else {
                        disp.callRemoteMethod(target, get_call, get_options);
                    }
                    long get_time=System.nanoTime()-start;
                    avg_gets.add(get_time);
                    num_reads.increment();
                }
                else { // sync or async (based on value of 'sync') PUT
                    pickAnycastTargets(targets);
                    long start=System.nanoTime();
                    disp.callRemoteMethods(targets, put_call, put_options);
                    long put_time=System.nanoTime()-start;
                    targets.clear();
                    avg_puts.add(put_time);
                    num_writes.increment();
                }
            }
            catch(Throwable throwable) {
                throwable.printStackTrace();
            }
        }
    }

    /** Picks a uniformly random member as the target of a GET. */
    private Address pickTarget() {
        return Util.pickRandomElement(dests);
    }

    /**
     * Fills {@code anycast_targets} with up to {@code anycast_count} distinct members,
     * walking the membership list starting just after the local address (wrapping around).
     */
    private void pickAnycastTargets(List<Address> anycast_targets) {
        int index=dests.indexOf(local_addr);
        for(int i=index + 1; i < index + 1 + anycast_count; i++) {
            int new_index=i % dests.size();
            Address tmp=dests.get(new_index);
            if(!anycast_targets.contains(tmp))
                anycast_targets.add(tmp);
        }
    }
}
/**
 * Entry point: parses command-line options, initializes the test instance and —
 * unless -nohup was given — starts the interactive event thread.
 *
 * @param args command-line options; any unrecognized option prints usage and exits
 * @throws Exception if initialization fails outside the guarded section
 */
public static void main(String[] args) throws Exception {
    String name=null, bind_addr=null, mcast_addr="232.4.5.6";
    boolean run_event_loop=true;
    AddressGenerator addr_generator=null;
    int port=7800, mcast_port=45566;
    boolean udp=true;
    String initial_hosts=null;

    for(int i=0; i < args.length; i++) {
        switch(args[i]) {
            case "-name":
                name=args[++i];
                break;
            case "-nohup":
                run_event_loop=false;
                break;
            case "-uuid":
                addr_generator=new OneTimeAddressGenerator(Long.parseLong(args[++i]));
                break;
            case "-port":
                port=Integer.parseInt(args[++i]);
                break;
            case "-bind_addr":
                bind_addr=args[++i];
                break;
            case "-tcp":
                udp=false; // use TCP transport instead of the UDP default
                break;
            case "-mcast_addr":
                mcast_addr=args[++i];
                break;
            case "-mcast_port":
                mcast_port=Integer.parseInt(args[++i]);
                break;
            case "-initial_hosts":
                initial_hosts=args[++i];
                break;
            default: // unknown option: print usage and quit
                help();
                return;
        }
    }

    ProgrammaticUPerf test=null;
    try {
        test=new ProgrammaticUPerf();
        test.init(name, addr_generator, bind_addr, port, udp, mcast_addr, mcast_port, initial_hosts);
        if(run_event_loop)
            test.startEventThread();
    }
    catch(Throwable ex) {
        ex.printStackTrace();
        if(test != null)
            test.stop();
    }
}
/** Prints the command-line usage string to stdout. */
static void help() {
    String usage="%s [-name name] [-nohup] [-uuid <UUID>] [-port <bind port>] "
      + "[-bind_addr bind-address] [-tcp] [-mcast_addr addr] [-mcast_port port]\n"
      + "[-initial_hosts hosts]";
    System.out.printf(usage, ProgrammaticUPerf.class.getSimpleName());
}
}
| |
/**
*/
package de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl;
import de.uni_hildesheim.sse.vil.expressions.expressionDsl.Constant;
import de.uni_hildesheim.sse.vil.expressions.expressionDsl.ExpressionDslPackage;
import de.uni_hildesheim.sse.vil.expressions.expressionDsl.NumValue;
import de.uni_hildesheim.sse.vil.expressions.expressionDsl.QualifiedName;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Constant</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getNValue <em>NValue</em>}</li>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getSValue <em>SValue</em>}</li>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getQValue <em>QValue</em>}</li>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getBValue <em>BValue</em>}</li>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getNull <em>Null</em>}</li>
* <li>{@link de.uni_hildesheim.sse.vil.expressions.expressionDsl.impl.ConstantImpl#getVersion <em>Version</em>}</li>
* </ul>
*
* @generated
*/
public class ConstantImpl extends MinimalEObjectImpl.Container implements Constant
{
  // NOTE: EMF-generated model class. The feature-ID switches below (eGet/eSet/eUnset/
  // eIsSet/eInverseRemove) are driven by the ExpressionDslPackage constants and must
  // stay in sync with the generated package metadata.

  /**
   * The cached value of the '{@link #getNValue() <em>NValue</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getNValue()
   * @generated
   * @ordered
   */
  protected NumValue nValue;

  /**
   * The default value of the '{@link #getSValue() <em>SValue</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getSValue()
   * @generated
   * @ordered
   */
  protected static final String SVALUE_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getSValue() <em>SValue</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getSValue()
   * @generated
   * @ordered
   */
  protected String sValue = SVALUE_EDEFAULT;

  /**
   * The cached value of the '{@link #getQValue() <em>QValue</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getQValue()
   * @generated
   * @ordered
   */
  protected QualifiedName qValue;

  /**
   * The default value of the '{@link #getBValue() <em>BValue</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getBValue()
   * @generated
   * @ordered
   */
  protected static final String BVALUE_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getBValue() <em>BValue</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getBValue()
   * @generated
   * @ordered
   */
  protected String bValue = BVALUE_EDEFAULT;

  /**
   * The default value of the '{@link #getNull() <em>Null</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getNull()
   * @generated
   * @ordered
   */
  protected static final String NULL_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getNull() <em>Null</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getNull()
   * @generated
   * @ordered
   */
  protected String null_ = NULL_EDEFAULT;

  /**
   * The default value of the '{@link #getVersion() <em>Version</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getVersion()
   * @generated
   * @ordered
   */
  protected static final String VERSION_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getVersion() <em>Version</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getVersion()
   * @generated
   * @ordered
   */
  protected String version = VERSION_EDEFAULT;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected ConstantImpl()
  {
    super();
  }

  /**
   * Returns the EClass describing this model object.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return ExpressionDslPackage.Literals.CONSTANT;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NumValue getNValue()
  {
    return nValue;
  }

  /**
   * Swaps the 'NValue' containment reference and queues a SET notification
   * on the given chain (without handling the inverse side).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetNValue(NumValue newNValue, NotificationChain msgs)
  {
    NumValue oldNValue = nValue;
    nValue = newNValue;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__NVALUE, oldNValue, newNValue);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * Sets the 'NValue' containment reference, detaching the previously contained
   * value and attaching the new one before dispatching notifications.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setNValue(NumValue newNValue)
  {
    if (newNValue != nValue)
    {
      NotificationChain msgs = null;
      if (nValue != null)
        msgs = ((InternalEObject)nValue).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ExpressionDslPackage.CONSTANT__NVALUE, null, msgs);
      if (newNValue != null)
        msgs = ((InternalEObject)newNValue).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ExpressionDslPackage.CONSTANT__NVALUE, null, msgs);
      msgs = basicSetNValue(newNValue, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__NVALUE, newNValue, newNValue));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getSValue()
  {
    return sValue;
  }

  /**
   * Sets the 'SValue' attribute and notifies adapters of the change.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setSValue(String newSValue)
  {
    String oldSValue = sValue;
    sValue = newSValue;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__SVALUE, oldSValue, sValue));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public QualifiedName getQValue()
  {
    return qValue;
  }

  /**
   * Swaps the 'QValue' containment reference and queues a SET notification
   * on the given chain (without handling the inverse side).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public NotificationChain basicSetQValue(QualifiedName newQValue, NotificationChain msgs)
  {
    QualifiedName oldQValue = qValue;
    qValue = newQValue;
    if (eNotificationRequired())
    {
      ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__QVALUE, oldQValue, newQValue);
      if (msgs == null) msgs = notification; else msgs.add(notification);
    }
    return msgs;
  }

  /**
   * Sets the 'QValue' containment reference, detaching the previously contained
   * value and attaching the new one before dispatching notifications.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setQValue(QualifiedName newQValue)
  {
    if (newQValue != qValue)
    {
      NotificationChain msgs = null;
      if (qValue != null)
        msgs = ((InternalEObject)qValue).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - ExpressionDslPackage.CONSTANT__QVALUE, null, msgs);
      if (newQValue != null)
        msgs = ((InternalEObject)newQValue).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - ExpressionDslPackage.CONSTANT__QVALUE, null, msgs);
      msgs = basicSetQValue(newQValue, msgs);
      if (msgs != null) msgs.dispatch();
    }
    else if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__QVALUE, newQValue, newQValue));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getBValue()
  {
    return bValue;
  }

  /**
   * Sets the 'BValue' attribute and notifies adapters of the change.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setBValue(String newBValue)
  {
    String oldBValue = bValue;
    bValue = newBValue;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__BVALUE, oldBValue, bValue));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getNull()
  {
    return null_;
  }

  /**
   * Sets the 'Null' attribute and notifies adapters of the change.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setNull(String newNull)
  {
    String oldNull = null_;
    null_ = newNull;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__NULL, oldNull, null_));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String getVersion()
  {
    return version;
  }

  /**
   * Sets the 'Version' attribute and notifies adapters of the change.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void setVersion(String newVersion)
  {
    String oldVersion = version;
    version = newVersion;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, ExpressionDslPackage.CONSTANT__VERSION, oldVersion, version));
  }

  /**
   * Clears a containment reference through the corresponding basicSet* method
   * so the change is folded into the given notification chain.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case ExpressionDslPackage.CONSTANT__NVALUE:
        return basicSetNValue(null, msgs);
      case ExpressionDslPackage.CONSTANT__QVALUE:
        return basicSetQValue(null, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * Reflective read access: maps a feature ID to the corresponding getter.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case ExpressionDslPackage.CONSTANT__NVALUE:
        return getNValue();
      case ExpressionDslPackage.CONSTANT__SVALUE:
        return getSValue();
      case ExpressionDslPackage.CONSTANT__QVALUE:
        return getQValue();
      case ExpressionDslPackage.CONSTANT__BVALUE:
        return getBValue();
      case ExpressionDslPackage.CONSTANT__NULL:
        return getNull();
      case ExpressionDslPackage.CONSTANT__VERSION:
        return getVersion();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * Reflective write access: maps a feature ID to the corresponding setter.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case ExpressionDslPackage.CONSTANT__NVALUE:
        setNValue((NumValue)newValue);
        return;
      case ExpressionDslPackage.CONSTANT__SVALUE:
        setSValue((String)newValue);
        return;
      case ExpressionDslPackage.CONSTANT__QVALUE:
        setQValue((QualifiedName)newValue);
        return;
      case ExpressionDslPackage.CONSTANT__BVALUE:
        setBValue((String)newValue);
        return;
      case ExpressionDslPackage.CONSTANT__NULL:
        setNull((String)newValue);
        return;
      case ExpressionDslPackage.CONSTANT__VERSION:
        setVersion((String)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * Reflective unset: restores the feature to its default value.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case ExpressionDslPackage.CONSTANT__NVALUE:
        setNValue((NumValue)null);
        return;
      case ExpressionDslPackage.CONSTANT__SVALUE:
        setSValue(SVALUE_EDEFAULT);
        return;
      case ExpressionDslPackage.CONSTANT__QVALUE:
        setQValue((QualifiedName)null);
        return;
      case ExpressionDslPackage.CONSTANT__BVALUE:
        setBValue(BVALUE_EDEFAULT);
        return;
      case ExpressionDslPackage.CONSTANT__NULL:
        setNull(NULL_EDEFAULT);
        return;
      case ExpressionDslPackage.CONSTANT__VERSION:
        setVersion(VERSION_EDEFAULT);
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * Reflective check whether a feature currently differs from its default.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case ExpressionDslPackage.CONSTANT__NVALUE:
        return nValue != null;
      case ExpressionDslPackage.CONSTANT__SVALUE:
        return SVALUE_EDEFAULT == null ? sValue != null : !SVALUE_EDEFAULT.equals(sValue);
      case ExpressionDslPackage.CONSTANT__QVALUE:
        return qValue != null;
      case ExpressionDslPackage.CONSTANT__BVALUE:
        return BVALUE_EDEFAULT == null ? bValue != null : !BVALUE_EDEFAULT.equals(bValue);
      case ExpressionDslPackage.CONSTANT__NULL:
        return NULL_EDEFAULT == null ? null_ != null : !NULL_EDEFAULT.equals(null_);
      case ExpressionDslPackage.CONSTANT__VERSION:
        return VERSION_EDEFAULT == null ? version != null : !VERSION_EDEFAULT.equals(version);
    }
    return super.eIsSet(featureID);
  }

  /**
   * Returns a debug representation; only the attribute values are included
   * (the containment references nValue and qValue are not printed here).
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();

    StringBuilder result = new StringBuilder(super.toString());
    result.append(" (sValue: ");
    result.append(sValue);
    result.append(", bValue: ");
    result.append(bValue);
    result.append(", null: ");
    result.append(null_);
    result.append(", version: ");
    result.append(version);
    result.append(')');
    return result.toString();
  }

} //ConstantImpl
| |
/**
*
*/
package jkit.io.csv;
import infovis.util.Resource;
import java.io.IOException;
import java.io.Reader;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* An easy to use csv reader. During the reading events are generated and passed
* to the {@link CSVHandler} set by {@link #setHandler(CSVHandler)}.
*
* @author Joschi <josua.krause@googlemail.com>
*/
public class CSVReader {

  /** The line delimiter -- assumed to be LF. */
  private static final String NL = "\n";

  /**
   * The context of the CSV cursor.
   *
   * @author Joschi <josua.krause@googlemail.com>
   */
  private final class Context implements CSVContext {

    /** The column names. */
    private final List<String> colNames;

    /** Whether row names are stored. */
    private final boolean hasRowNames;

    /** The current column. */
    private int col;

    /** The current row. */
    private int row;

    /** The current row name if they are stored. */
    private String rowName;

    /**
     * Creates a context.
     *
     * @param hasColNames Whether column names are present.
     * @param hasRowNames Whether row names are present.
     */
    public Context(final boolean hasColNames, final boolean hasRowNames) {
      this.hasRowNames = hasRowNames;
      colNames = hasColNames ? new LinkedList<String>() : null;
      rowName = null;
      // -1 marks "currently inside the title row / title column"
      row = hasColNames ? -1 : 0;
      col = hasRowNames ? -1 : 0;
    }

    /**
     * Adds a column name.
     *
     * @param name The name.
     */
    public void addColName(final String name) {
      colNames.add(name);
    }

    /**
     * Sets the current row name.
     *
     * @param rowName The name.
     */
    public void setRowName(final String rowName) {
      this.rowName = rowName;
    }

    /** Advances the cell. */
    public void nextCell() {
      ++col;
    }

    /** Advances the row. */
    public void nextRow() {
      ++row;
      col = hasRowNames ? -1 : 0;
    }

    @Override
    public int col() {
      return col;
    }

    @Override
    public String colName() {
      if(col < 0) return null;
      // NOTE(review): if a data row has more cells than the title row this
      // get(col) throws IndexOutOfBoundsException -- confirm inputs are rectangular
      return colNames != null ? colNames.get(col) : "" + col;
    }

    @Override
    public int row() {
      return row;
    }

    @Override
    public String rowName() {
      return rowName != null ? rowName : "" + row;
    }

    @Override
    public CSVReader reader() {
      return CSVReader.this;
    }

    @Override
    public String toString() {
      return "ctx[" + row + ":" + col + "](" + rowName() + ":"
          + colName() + ")";
    }

  }

  /** The CSV delimiter. */
  private final char delimiter;

  /** The string indicator. */
  private final char string;

  /** Whether column titles are used. */
  private boolean colTitle;

  /** Whether row titles are used. */
  private boolean rowTitle;

  /** The currently installed handler. */
  private CSVHandler handler;

  /**
   * Creates a csv reader with default delimiters. ';' for cells and '"' for
   * strings.
   */
  public CSVReader() {
    this(';', '"');
  }

  /**
   * Creates a csv reader.
   *
   * @param delimiter The cell delimiter.
   * @param string The string delimiter.
   */
  public CSVReader(final char delimiter, final char string) {
    this(delimiter, string, false, false);
  }

  /**
   * Creates a csv reader.
   *
   * @param delimiter The cell delimiter.
   * @param string The string delimiter.
   * @param columnTitles Whether to interpret the first row as column titles.
   * @param rowTitles Whether to interpret the first column of each row as row
   *          title.
   */
  public CSVReader(final char delimiter, final char string,
      final boolean columnTitles, final boolean rowTitles) {
    this.delimiter = delimiter;
    this.string = string;
    rowTitle = rowTitles;
    colTitle = columnTitles;
    handler = null;
  }

  /**
   * Creates a lazy representation of the rows of a resource.
   *
   * @param resource The resource.
   * @param reader The CSV reader.
   * @return A lazy collection of rows, or <code>null</code> if the resource
   *         has no content.
   */
  public static final Iterable<CSVRow> readRows(
      final Resource resource, final CSVReader reader) {
    if(!resource.hasContent()) return null;
    return new Iterable<CSVRow>() {

      @Override
      public Iterator<CSVRow> iterator() {
        try {
          return readRows(resource.reader(), reader);
        } catch(final IOException e) {
          throw new IllegalStateException(e);
        }
      }

    };
  }

  /**
   * Reads rows from a reader. Parsing happens on a background daemon thread
   * that feeds a bounded queue; the returned iterator consumes from it.
   *
   * @param r The reader.
   * @param reader The CSV reader. Note that its handler is replaced for the
   *          duration of the parse.
   * @return A lazy iterator.
   */
  public static final Iterator<CSVRow> readRows(final Reader r, final CSVReader reader) {
    final Object lock = new Object();
    return new Iterator<CSVRow>() {

      private final CSVHandler handler = new CSVAdapter() {

        private CSVRow current;

        private int len;

        @Override
        public void colTitle(final CSVContext ctx, final String title) {
          ++len;
        }

        @Override
        public void cell(final CSVContext ctx, final String content) {
          final String name = reader.readColTitles() ? ctx.colName() : null;
          final int i = ctx.col();
          len = Math.max(i + 1, len);
          if(current == null) {
            current = new CSVRow(len);
          }
          current.addCell(i, name, content);
        }

        @Override
        public void row(final CSVContext ctx) {
          // a row event closes the row built so far and hands it to the consumer
          if(current != null) {
            try {
              rows.put(current);
              synchronized(lock) {
                lock.notifyAll();
              }
            } catch(final InterruptedException e) {
              Thread.currentThread().interrupt();
            }
            current = null;
          }
        }

        @Override
        public void end(final CSVContext ctx) {
          row(ctx); // flush the last (possibly unterminated) row
          finish = true;
          synchronized(lock) {
            lock.notifyAll();
          }
        }

      };

      // TODO maybe use something different than a blocking queue
      protected final BlockingQueue<CSVRow> rows = new LinkedBlockingQueue<CSVRow>(2000);

      protected volatile boolean finish = false;

      {
        final CSVHandler h = handler;
        final Thread runner = new Thread() {

          @Override
          public void run() {
            try {
              reader.setHandler(h);
              reader.read(r);
            } catch(final IOException e) {
              e.printStackTrace();
            } finally {
              // always release the consumer, even on error
              finish = true;
              synchronized(lock) {
                lock.notifyAll();
              }
            }
          }

        };
        runner.setDaemon(true);
        runner.start();
        fetchNext();
      }

      /** The prefetched row; <code>null</code> when the input is exhausted. */
      private CSVRow cur;

      /** Polls the queue until a row arrives or the producer finished. */
      private void fetchNext() {
        while((cur = rows.poll()) == null) {
          if(finish) return;
          try {
            synchronized(lock) {
              lock.wait(100);
            }
          } catch(final InterruptedException e) {
            Thread.currentThread().interrupt();
          }
        }
      }

      @Override
      public boolean hasNext() {
        return cur != null;
      }

      @Override
      public CSVRow next() {
        // honor the Iterator contract: previously this silently returned null
        // when the iterator was exhausted
        if(cur == null) throw new java.util.NoSuchElementException();
        final CSVRow row = cur;
        fetchNext();
        return row;
      }

      @Override
      public void remove() {
        throw new UnsupportedOperationException();
      }

    };
  }

  /**
   * Reads from a reader, generating events on the installed {@link CSVHandler}.
   *
   * @param r The reader.
   * @throws IOException If an I/O Exception occurs.
   * @throws IllegalStateException If no handler has been set.
   */
  public void read(final Reader r) throws IOException {
    if(handler == null) throw new IllegalStateException(
        "handler needs to be set first");
    final CSVHandler hnd = handler;
    final Context ctx = new Context(colTitle, rowTitle);
    hnd.start(ctx);
    char ignore = 0x0;      // the complementary newline char (of a CR/LF pair) to skip
    char line = 0x0;        // the first newline char encountered; defines the row delimiter
    boolean canString = true;   // a string may only start directly after a delimiter/newline
    boolean isString = false;   // currently inside a quoted cell
    boolean endString = false;  // just saw a closing quote (may be an escaped quote)
    boolean afterLn = false;    // last consumed char ended a line
    StringBuilder current = null;
    int i;
    while((i = r.read()) != -1) {
      if(current == null) {
        current = new StringBuilder();
      }
      final char c = (char) i;
      // NOTE(review): once the line style is detected, EVERY occurrence of the
      // complementary newline char is dropped -- even inside quoted strings;
      // confirm this is the intended handling of mixed line endings
      if(c == ignore && ignore != 0x0) {
        continue;
      }
      afterLn = false;
      if(line == 0x0 && (c == '\r' || c == '\n')) {
        line = c;
        ignore = (c == '\r') ? '\n' : '\r';
      }
      if(c == string) {
        if(!endString) {
          if(!isString) {
            if(canString) {
              isString = true;
              continue;
            }
          } else {
            endString = true;
            continue;
          }
        }
        // doubled quote inside a string: keep one literal quote
        endString = false;
      } else if(endString) {
        endString = false;
        isString = false;
      }
      if(c == delimiter && !isString) {
        handle(hnd, current.toString(), ctx);
        current = null;
        canString = true;
        continue;
      }
      canString = false;
      if(c == line) {
        if(isString) {
          current.append(NL); // newlines inside strings are normalized to LF
        } else {
          handle(hnd, current.toString(), ctx);
          line(ctx);
          canString = true;
          current = null;
        }
        afterLn = true;
        continue;
      }
      current.append(c);
    }
    // emit a trailing cell unless the input ended directly after a newline
    if(current != null && (current.length() > 0 || !afterLn)) {
      handle(hnd, current.toString(), ctx);
    }
    hnd.end(ctx);
  }

  /**
   * Handles a cell: dispatches it as row title, column title or data cell
   * depending on the cursor position, then advances the cursor.
   *
   * @param hnd The handler.
   * @param content The content.
   * @param ctx The context.
   */
  private static void handle(final CSVHandler hnd, final String content, final Context ctx) {
    switch(ctx.col()) {
      case -1:
        if(ctx.row() < 0) {
          // top-left corner cell (title row + title column) is ignored
          break;
        }
        ctx.setRowName(content);
        hnd.rowTitle(ctx, content);
        break;
      case 0:
        if(ctx.row() >= 0) {
          hnd.row(ctx);
        }
        // no break
        //$FALL-THROUGH$
      default:
        if(ctx.row() < 0) {
          ctx.addColName(content);
          hnd.colTitle(ctx, content);
          break;
        }
        hnd.cell(ctx, content);
        break;
    }
    ctx.nextCell();
  }

  /**
   * Advances the current line.
   *
   * @param ctx The context.
   */
  private static void line(final Context ctx) {
    ctx.nextRow();
  }

  /**
   * Sets the csv handler.
   *
   * @param handler The handler.
   */
  public void setHandler(final CSVHandler handler) {
    this.handler = handler;
  }

  /**
   * Setter.
   *
   * @param colTitle Whether to interpret the first row as column titles.
   */
  public void setReadColTitles(final boolean colTitle) {
    this.colTitle = colTitle;
  }

  /**
   * Getter.
   *
   * @return Whether the first row is interpreted as column titles.
   */
  public boolean readColTitles() {
    return colTitle;
  }

  /**
   * Setter.
   *
   * @param rowTitle Whether to interpret the first column as row titles.
   */
  public void setReadRowTitles(final boolean rowTitle) {
    this.rowTitle = rowTitle;
  }

  /**
   * Getter.
   *
   * @return Whether the first column is interpreted as row titles.
   */
  public boolean readRowTitles() {
    return rowTitle;
  }

}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.vmwarecloudsimple.fluent;
import com.azure.core.annotation.ReturnType;
import com.azure.core.annotation.ServiceMethod;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.management.polling.PollResult;
import com.azure.core.util.Context;
import com.azure.core.util.polling.SyncPoller;
import com.azure.resourcemanager.vmwarecloudsimple.fluent.models.VirtualMachineInner;
import com.azure.resourcemanager.vmwarecloudsimple.models.PatchPayload;
import com.azure.resourcemanager.vmwarecloudsimple.models.StopMode;
import com.azure.resourcemanager.vmwarecloudsimple.models.VirtualMachineStopMode;
/** An instance of this class provides access to all the operations defined in VirtualMachinesClient. */
public interface VirtualMachinesClient {
/**
* Returns the list of virtual machines within the subscription.
*
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of virtual machines.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<VirtualMachineInner> list();
/**
* Returns the list of virtual machines within the subscription.
*
* @param filter The filter to apply on the list operation.
* @param top The maximum number of record sets to return.
* @param skipToken to be used by nextLink implementation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of virtual machines.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<VirtualMachineInner> list(String filter, Integer top, String skipToken, Context context);
/**
* Returns the list of virtual machines within the resource group.
*
* @param resourceGroupName The name of the resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of virtual machines.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<VirtualMachineInner> listByResourceGroup(String resourceGroupName);
/**
* Returns the list of virtual machines within the resource group.
*
* @param resourceGroupName The name of the resource group.
* @param filter The filter to apply on the list operation.
* @param top The maximum number of record sets to return.
* @param skipToken to be used by nextLink implementation.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return list of virtual machines.
*/
@ServiceMethod(returns = ReturnType.COLLECTION)
PagedIterable<VirtualMachineInner> listByResourceGroup(
String resourceGroupName, String filter, Integer top, String skipToken, Context context);
/**
* Get virtual machine.
*
* @param resourceGroupName The name of the resource group.
* @param virtualMachineName virtual machine name.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual machine.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
VirtualMachineInner getByResourceGroup(String resourceGroupName, String virtualMachineName);
/**
* Get virtual machine.
*
* @param resourceGroupName The name of the resource group.
* @param virtualMachineName virtual machine name.
* @param context The context to associate with this operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return virtual machine.
*/
@ServiceMethod(returns = ReturnType.SINGLE)
Response<VirtualMachineInner> getByResourceGroupWithResponse(
String resourceGroupName, String virtualMachineName, Context context);
    /**
     * Creates or updates a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Create or Update Virtual Machine request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running operation; yields the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<VirtualMachineInner>, VirtualMachineInner> beginCreateOrUpdate(
        String resourceGroupName, String virtualMachineName, VirtualMachineInner virtualMachineRequest);

    /**
     * Creates or updates a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Create or Update Virtual Machine request.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running operation; yields the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<VirtualMachineInner>, VirtualMachineInner> beginCreateOrUpdate(
        String resourceGroupName,
        String virtualMachineName,
        VirtualMachineInner virtualMachineRequest,
        Context context);

    /**
     * Creates or updates a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Create or Update Virtual Machine request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    VirtualMachineInner createOrUpdate(
        String resourceGroupName, String virtualMachineName, VirtualMachineInner virtualMachineRequest);

    /**
     * Creates or updates a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Create or Update Virtual Machine request.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    VirtualMachineInner createOrUpdate(
        String resourceGroupName,
        String virtualMachineName,
        VirtualMachineInner virtualMachineRequest,
        Context context);
    /**
     * Deletes a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running delete operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginDelete(String resourceGroupName, String virtualMachineName);

    /**
     * Deletes a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running delete operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginDelete(
        String resourceGroupName, String virtualMachineName, Context context);

    /**
     * Deletes a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String virtualMachineName);

    /**
     * Deletes a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void delete(String resourceGroupName, String virtualMachineName, Context context);
    /**
     * Patches virtual machine properties (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Patch virtual machine request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running operation; yields the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<VirtualMachineInner>, VirtualMachineInner> beginUpdate(
        String resourceGroupName, String virtualMachineName, PatchPayload virtualMachineRequest);

    /**
     * Patches virtual machine properties (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Patch virtual machine request.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running operation; yields the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<VirtualMachineInner>, VirtualMachineInner> beginUpdate(
        String resourceGroupName, String virtualMachineName, PatchPayload virtualMachineRequest, Context context);

    /**
     * Patches virtual machine properties, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Patch virtual machine request.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    VirtualMachineInner update(String resourceGroupName, String virtualMachineName, PatchPayload virtualMachineRequest);

    /**
     * Patches virtual machine properties, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param virtualMachineRequest Patch virtual machine request.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the virtual machine model.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    VirtualMachineInner update(
        String resourceGroupName, String virtualMachineName, PatchPayload virtualMachineRequest, Context context);
    /**
     * Powers on a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running start operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginStart(String resourceGroupName, String virtualMachineName);

    /**
     * Powers on a virtual machine (long-running operation).
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running start operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginStart(String resourceGroupName, String virtualMachineName, Context context);

    /**
     * Powers on a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void start(String resourceGroupName, String virtualMachineName);

    /**
     * Powers on a virtual machine, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void start(String resourceGroupName, String virtualMachineName, Context context);
    /**
     * Powers off a virtual machine (long-running operation); stop modes: shutdown, poweroff, and suspend.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param mode The stop mode, passed as a query parameter (reboot, shutdown, etc...).
     * @param m The stop mode, passed in the request body (reboot, shutdown, etc...).
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running stop operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginStop(
        String resourceGroupName, String virtualMachineName, StopMode mode, VirtualMachineStopMode m);

    /**
     * Powers off a virtual machine (long-running operation); stop modes: shutdown, poweroff, and suspend.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param mode The stop mode, passed as a query parameter (reboot, shutdown, etc...).
     * @param m The stop mode, passed in the request body (reboot, shutdown, etc...).
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return the {@link SyncPoller} for polling the long-running stop operation.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    SyncPoller<PollResult<Void>, Void> beginStop(
        String resourceGroupName, String virtualMachineName, StopMode mode, VirtualMachineStopMode m, Context context);

    /**
     * Powers off a virtual machine, blocking until the operation completes; stop modes: shutdown, poweroff, and suspend.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param mode The stop mode, passed as a query parameter (reboot, shutdown, etc...).
     * @param m The stop mode, passed in the request body (reboot, shutdown, etc...).
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void stop(String resourceGroupName, String virtualMachineName, StopMode mode, VirtualMachineStopMode m);

    /**
     * Powers off a virtual machine using the service-side default stop mode, blocking until the operation completes.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void stop(String resourceGroupName, String virtualMachineName);

    /**
     * Powers off a virtual machine, blocking until the operation completes; stop modes: shutdown, poweroff, and suspend.
     *
     * @param resourceGroupName The name of the resource group.
     * @param virtualMachineName The virtual machine name.
     * @param mode The stop mode, passed as a query parameter (reboot, shutdown, etc...).
     * @param m The stop mode, passed in the request body (reboot, shutdown, etc...).
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    void stop(
        String resourceGroupName, String virtualMachineName, StopMode mode, VirtualMachineStopMode m, Context context);
}
| |
//Copyright 2014 Zeno Futurista (zenofuturista@gmail.com)
//
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
package cz.zeno.miner;
import cz.zeno.miner.interfaces.Appender;
import cz.zeno.miner.interfaces.Server;
import cz.zeno.miner.interfaces.Work;
import cz.zeno.miner.interfaces.Worker;
import gnu.io.CommPort;
import gnu.io.CommPortIdentifier;
import gnu.io.SerialPort;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.codec.DecoderException;
import org.apache.commons.lang.ArrayUtils;
import sun.security.pkcs11.wrapper.Constants;
/**
*
* @author zeno
*/
public final class FpgaSerial implements Worker
{
    SerialPort serialPort;
    SerialReader sr;
    Thread serailThread;
    Server server;
    Appender appender;

    //this class is the communication layer for the sha miner (and future scrypt miners)
    //it implements the Worker interface - thus StratumClient can command it
    //it needs two interfaces passed to the constructor: Server for work related issues
    //and Appender for status output

    /**
     * Opens the given serial device and starts the asynchronous reader thread.
     *
     * @param device serial port name the fpga is attached to (platform specific)
     * @param server callback used to submit shares and request new work
     * @param appender sink for human-readable status messages
     */
    public FpgaSerial(String device, Server server, Appender appender) {
        this.server = server;
        this.appender = appender;
        try {
            //connecting is part of construction, no separate open step is needed
            connect(device, 5000);
        } catch (Exception ex) {
            Logger.getLogger(FpgaSerial.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    //open the serial port which is connected to the fpga device
    //serial port parameters must match on the device side!
    private void connect(String portName, int threshold) throws Exception
    {
        CommPortIdentifier portIdentifier = CommPortIdentifier.getPortIdentifier(portName);
        if (portIdentifier.isCurrentlyOwned())
        {
            appender.append("Error: Port is currently in use");
        }
        else
        {
            //from RXTX examples
            CommPort commPort = portIdentifier.open(this.getClass().getName(), 2000);
            if (commPort instanceof SerialPort)
            {
                //set up the serial port: receive timeout + 115200 baud, 8 data bits,
                //2 stop bits, no parity
                serialPort = (SerialPort) commPort;
                serialPort.enableReceiveTimeout(threshold);
                serialPort.setSerialPortParams(115200, SerialPort.DATABITS_8, SerialPort.STOPBITS_2, SerialPort.PARITY_NONE);
                //start the async reader
                sr = new SerialReader(serialPort, server);
                serailThread = new Thread(sr);
                serailThread.start();
            }
            else
            {
                appender.append("Error: Only serial ports are handled by this example.");
            }
        }
    }

    //set after the final "stop work" has been scheduled; nothing can be scheduled afterwards
    boolean stopped = false;
    //set just before the final "stop work" is scheduled
    boolean stop = false;

    /**
     * Stops the miner: schedules a terminating work item (nonce FFFFFFFF), shuts
     * the reader down, joins its thread and closes the serial port.
     */
    public void stop() throws InterruptedException, DecoderException
    {
        //build the stop work as hex: 76 zero bytes of header...
        //(StringBuilder instead of repeated String concatenation in a loop)
        StringBuilder stopWork = new StringBuilder();
        for (int i = 0; i < 76; i++)
        {
            stopWork.append("00");
        }
        //...the last nonce, which stops the miner...
        stopWork.append("FFFFFFFF");
        //...and a blank target + jobID extension
        stopWork.append("00000000").append("00000000");
        stop = true;
        scheduleWork(Utils.hexStringToByteArray(stopWork.toString()));
        sr.stop();
        serailThread.join();
        serialPort.close();
    }

    private final Object submitLock = new Object();
    //maps jobID (hex string) to its Work; read by the reader thread when a share arrives
    LinkedHashMap<String, Work> jobToWork = new LinkedHashMap<>();

    @Override
    public void scheduleWork(Work w) {
        //register the work under its job id
        jobToWork.put(w.getJobIDString(), w);
        //drop some historical work so the map does not grow without bound
        if (jobToWork.size() > 100)
        {
            for (int i = 0; i < 20; i++)
            {
                //LinkedHashMap iterates in insertion order, so this evicts the oldest entries
                jobToWork.remove(jobToWork.keySet().iterator().next());
            }
        }
        //send it to the fpga
        scheduleWork(w.getWork());
    }

    //writes one 88-byte work header to the fpga
    private void scheduleWork(byte[] binheader) {
        synchronized (submitLock)
        {
            //bad header! take into account, that original bitcoin header is extended with target and jobID (additional 8B)
            if (binheader.length != 88)
            {
                //System.lineSeparator() replaces the former dependency on the
                //JDK-internal sun.security.pkcs11.wrapper.Constants.NEWLINE
                appender.append("wrong work data length" + System.lineSeparator());
                return;
            }
            //stop switch, after this nothing can be scheduled
            if (stopped)
                return;
            if (stop)
                stopped = true;
            //get a fresh output stream and send the data
            //(a pointless single-iteration outer loop was removed here)
            OutputStream os = null;
            try {
                os = serialPort.getOutputStream();
                for (int i = 0; i < 88; i++)
                {
                    os.write(binheader[i]);
                }
                os.flush();
                //select the jobID (last 4 bytes of the header) and print it
                StringBuilder out = new StringBuilder();
                for (int i = 84; i < 88; i++)
                {
                    out.append(String.format("%02X", binheader[i]));
                }
                appender.append("new job set :" + out + System.lineSeparator());
            } catch (IOException ex) {
                Logger.getLogger(FpgaSerial.class.getName()).log(Level.SEVERE, null, ex);
            } finally {
                try {
                    if (os != null)
                        os.close();
                } catch (IOException ex) {
                    Logger.getLogger(FpgaSerial.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }

    //this reader listens to incoming traffic from the fpga
    private class SerialReader implements Runnable
    {
        boolean stop = false;
        SerialPort serialPort;
        InputStream in;
        Server server;

        public SerialReader(SerialPort serialPort, Server submitter) throws IOException
        {
            this.server = submitter;
            this.serialPort = serialPort;
            this.in = serialPort.getInputStream();
        }

        //signal the read loop to finish and unblock it by closing the stream
        public void stop() {
            this.stop = true;
            try {
                in.close();
            } catch (IOException ex) {
                Logger.getLogger(FpgaSerial.class.getName()).log(Level.SEVERE, null, ex);
            }
        }

        @Override
        public void run()
        {
            //parse incoming data and decide whether to request new work or submit a share;
            //the fpga sends fixed 8-byte records: 4B jobID followed by 4B nonce
            byte[] buffer = new byte[64];
            int len;
            try
            {
                byte[] result = null;
                while (!stop)
                {
                    len = in.read(buffer);
                    if (len > 0)
                    {
                        //if there were bytes left from the last read, append the new bytes after them
                        if (result == null)
                            result = Arrays.copyOf(buffer, len);
                        else
                            result = ArrayUtils.addAll(result, Arrays.copyOf(buffer, len));
                        //TODO enhance stream parsing: if the stream ever gets misaligned the
                        //records are ruined - recognize the jobID pattern and re-align
                        //process every complete 8-byte record currently buffered
                        //(previously at most one record was handled per read)
                        while (result != null && result.length >= 8)
                        {
                            ByteBuffer bb = ByteBuffer.wrap(result);
                            int zerocounter = 0;
                            for (int i = 0; i < 8; i++)
                            {
                                if (result[i] == 0)
                                {
                                    zerocounter++;
                                }
                            }
                            //eight zero bytes mean the fpga ran to the end of the nonce interval
                            if (zerocounter == 8)
                            {
                                server.requestNewWork(FpgaSerial.this);
                            }
                            //otherwise a regular share submit: bytes 0-3 jobID, bytes 4-7 nonce
                            else
                            {
                                server.submitShare(jobToWork.get(Utils.byteArrayToHexString(ArrayUtils.subarray(bb.array(), 0, 4))), Utils.swapEndian(ArrayUtils.subarray(bb.array(), 4, 8)));
                            }
                            //keep any bytes beyond the processed 8-byte record
                            if (result.length == 8)
                                result = null;
                            else
                                result = Arrays.copyOfRange(result, 8, result.length);
                        }
                    }
                }
            }
            catch (IOException e)
            {
                if (!stop)
                    //unintentional - the stream failed while the reader was still supposed to run
                    Logger.getLogger(SerialReader.class.getName()).log(Level.SEVERE, null, e);
            }
            finally
            {
                try {
                    in.close();
                } catch (IOException ex) {
                    if (!stop)
                        //unintentional
                        Logger.getLogger(SerialReader.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
    }
}
| |
/*
* Copyright 2015 MiLaboratory.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.milaboratory.test;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.type.TypeFactory;
import com.milaboratory.core.sequence.Alphabet;
import com.milaboratory.core.sequence.Sequence;
import com.milaboratory.core.sequence.SequenceBuilder;
import com.milaboratory.primitivio.PrimitivI;
import com.milaboratory.primitivio.PrimitivO;
import com.milaboratory.util.GlobalObjectMappers;
import com.milaboratory.util.RandomUtil;
import org.apache.commons.math3.random.RandomDataGenerator;
import org.apache.commons.math3.random.RandomGenerator;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.junit.internal.AssumptionViolatedException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.text.DecimalFormat;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
 * Assorted test helpers: long/short test-mode switching, server-environment
 * discovery, serialization round-trip assertions, duration formatting and
 * random sequence generation.
 */
public class TestUtil {
    public static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat("0.00");

    /**
     * Returns {@code true} when long-running tests are enabled, i.e. the system
     * property {@code longTests} (or {@code longTest}) is set empty or to "true".
     */
    public static boolean lt() {
        return Objects.equals(System.getProperty("longTests"), "") ||
                Objects.equals(System.getProperty("longTests"), "true") ||
                Objects.equals(System.getProperty("longTest"), "") ||
                Objects.equals(System.getProperty("longTest"), "true");
    }

    /** Skips the calling test (JUnit assumption) unless long tests are enabled. */
    public static void assumeLongTest() {
        Assume.assumeTrue(lt());
    }

    /** Not a real assertion test: prints the detected test mode and env-property state. */
    @Test
    public void testLT() {
        if (lt())
            System.out.println("Long tests.");
        else
            System.out.println("Short tests.");
        if (getProperties().isEmpty()) {
            System.out.println("No system env properties.");
        } else {
            System.out.println("There are some system env properties.");
        }
    }

    /** Picks the iteration count (or similar knob) for the current test mode. */
    public static int its(int shortTest, int longTest) {
        return lt() ? longTest : shortTest;
    }

    /** Picks the iteration count (or similar knob) for the current test mode. */
    public static long its(long shortTest, long longTest) {
        return lt() ? longTest : shortTest;
    }

    /**
     * Returns the server environment directory named by the {@code serverEnv}
     * system property, normalized to end with the platform separator, or
     * {@code null} when the property is not set.
     *
     * @throws IllegalArgumentException if the property names a non-existing directory
     */
    public static String env() {
        String serverEnv = System.getProperty("serverEnv");
        if (serverEnv == null)
            return null;
        File serverEnvPath = new File(serverEnv);
        if (serverEnvPath.exists() && serverEnvPath.isDirectory()) {
            String ret = serverEnvPath.getAbsolutePath();
            if (!ret.endsWith(File.separator))
                ret = ret + File.separator;
            return ret;
        } else
            throw new IllegalArgumentException(serverEnv + " not exists.");
    }

    public static final String BIG_TEST_RESOURCE_PREFIX = "/big/";

    /** Resolves a big-test resource by its own file name; the test is skipped when absent. */
    public static String getBigTestResource(String file) {
        return getBigTestResource(file, file);
    }

    /**
     * Resolves {@code name} as a sibling of the classpath resource
     * {@code /big/<file>}. The calling test is skipped (via assumption) when
     * the resource is missing.
     */
    public static String getBigTestResource(String name, String file) {
        try {
            URL resource = TestUtil.class.getResource(BIG_TEST_RESOURCE_PREFIX + file);
            Assume.assumeTrue(resource != null);
            Path path = Paths.get(resource.toURI()).toAbsolutePath().resolveSibling(name);
            return path.toString();
        } catch (AssumptionViolatedException e) {
            throw e; // keep the "skip this test" signal intact
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Asserts that the object survives a PrimitivIO write/read round trip. */
    public static void assertPrimitivIO(Object object) {
        assertPrimitivIO(object, object.getClass());
    }

    /** Asserts that the object survives a PrimitivIO write/read round trip, read back as {@code clazz}. */
    public static void assertPrimitivIO(Object object, Class<?> clazz) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        PrimitivO o = new PrimitivO(bos);
        o.writeObject(object);
        ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
        PrimitivI i = new PrimitivI(bis);
        Object o1 = i.readObject(clazz);
        Assert.assertEquals(object, o1);
    }

    /** Asserts that the object survives a JSON serialize/deserialize round trip. */
    public static void assertJson(Object object) {
        assertJson(object, TypeFactory.defaultInstance().constructType(object.getClass()), false);
    }

    /** Round-trip assertion; optionally prints the serialized form. */
    public static void assertJson(Object object, boolean sout) {
        assertJson(object, TypeFactory.defaultInstance().constructType(object.getClass()), sout);
    }

    /** Round-trip assertion deserializing as {@code clazz}. */
    public static void assertJson(Object object, Class<?> clazz) {
        assertJson(object, TypeFactory.defaultInstance().constructType(clazz), false);
    }

    /** Round-trip assertion deserializing as the given Jackson type. */
    public static void assertJson(Object object, JavaType clazz) {
        assertJson(object, clazz, false);
    }

    /**
     * Serializes {@code object} to pretty JSON, reads it back as {@code clazz}
     * and asserts equality; optionally prints the JSON.
     */
    public static void assertJson(Object object, JavaType clazz, boolean sout) {
        try {
            String str = GlobalObjectMappers.PRETTY.writeValueAsString(object);
            if (sout)
                System.out.println(str);
            Object deser = GlobalObjectMappers.PRETTY.readValue(str, clazz);
            Assert.assertEquals(object, deser);
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    // Cached contents of <serverEnv>/properties.json; empty map when unavailable.
    private static volatile Map<String, String> envProperties;

    /**
     * Lazily loads {@code properties.json} from the server environment directory.
     * Returns an empty map when there is no environment or the file is absent or
     * unreadable; the result is cached for subsequent calls.
     */
    public static synchronized Map<String, String> getProperties() {
        if (envProperties == null) {
            String e = env();
            if (e == null)
                envProperties = Collections.emptyMap(); // typed, instead of raw Collections.EMPTY_MAP
            else {
                File propsFile = new File(e + "properties.json");
                if (!propsFile.exists())
                    envProperties = Collections.emptyMap();
                else {
                    try {
                        envProperties = GlobalObjectMappers.ONE_LINE
                                .readValue(propsFile, new TypeReference<HashMap<String, String>>() {
                                });
                    } catch (IOException ex) {
                        envProperties = Collections.emptyMap();
                    }
                }
            }
        }
        return envProperties;
    }

    /**
     * Formats a duration given in nanoseconds into a human-readable string,
     * choosing the unit (ns/us/ms/s/m) by successive division.
     */
    public static String time(long t) {
        double v = t;
        if ((t /= 1000) == 0)
            return "" + DECIMAL_FORMAT.format(v) + "ns";
        v /= 1000;
        if ((t /= 1000) == 0)
            return "" + DECIMAL_FORMAT.format(v) + "us";
        v /= 1000;
        if ((t /= 1000) == 0)
            return "" + DECIMAL_FORMAT.format(v) + "ms";
        v /= 1000;
        if ((t /= 60) == 0)
            return "" + DECIMAL_FORMAT.format(v) + "s";
        v /= 60;
        return "" + DECIMAL_FORMAT.format(v) + "m";
    }

    /** Random sequence of basic letters, length uniform in [minLength, maxLength]. */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet,
                                                           int minLength, int maxLength) {
        return randomSequence(alphabet, RandomUtil.getThreadLocalRandom(), minLength, maxLength);
    }

    /** Random sequence, length uniform in [minLength, maxLength]. */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet,
                                                           int minLength, int maxLength, boolean basicLettersOnly) {
        return randomSequence(alphabet, RandomUtil.getThreadLocalRandom(), minLength, maxLength, basicLettersOnly);
    }

    /** Random sequence of basic letters using the supplied generator. */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet, RandomDataGenerator r,
                                                           int minLength, int maxLength) {
        return randomSequence(alphabet, r.getRandomGenerator(), minLength, maxLength);
    }

    /** Random sequence using the supplied generator. */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet, RandomDataGenerator r,
                                                           int minLength, int maxLength, boolean basicLettersOnly) {
        return randomSequence(alphabet, r.getRandomGenerator(), minLength, maxLength, basicLettersOnly);
    }

    /** Random sequence of basic letters using the supplied generator. */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet, RandomGenerator r,
                                                           int minLength, int maxLength) {
        return randomSequence(alphabet, r, minLength, maxLength, true);
    }

    /**
     * Core implementation: draws a length uniformly in [minLength, maxLength] and
     * fills the sequence with random letters; when {@code basicLettersOnly} is set,
     * only the alphabet's basic (non-wildcard) letters are used.
     */
    public static <S extends Sequence<S>> S randomSequence(Alphabet<S> alphabet, RandomGenerator r,
                                                           int minLength, int maxLength, boolean basicLettersOnly) {
        int length = minLength == maxLength ?
                minLength : minLength + r.nextInt(maxLength - minLength + 1);
        SequenceBuilder<S> builder = alphabet.createBuilder();
        for (int i = 0; i < length; ++i)
            builder.append((byte) r.nextInt(basicLettersOnly ? alphabet.basicSize() : alphabet.size()));
        return builder.createAndDestroy();
    }
}
| |
/*
* Copyright (c) 2013, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package com.akiban.sql.parser;
import com.akiban.sql.StandardException;
import java.util.Properties;
/**
* A JoinNode represents a join result set for either of the basic DML
* operations: SELECT and INSERT. For INSERT - SELECT, any of the
* fields in a JoinNode can be used (the JoinNode represents
* the (join) SELECT statement in the INSERT - SELECT). For INSERT,
* the resultColumns in the selectList will contain the names of the columns
* being inserted into or updated.
*
*/
public class JoinNode extends TableOperatorNode
{
    /* Join semantics */
    public static enum JoinType {
        INNER, CROSS, LEFT_OUTER, RIGHT_OUTER, FULL_OUTER, UNION, STRAIGHT // STRAIGHT presumably models MySQL-style STRAIGHT_JOIN - TODO confirm
    }
    /** If this flag is true, this node represents a natural join. */
    private boolean naturalJoin;
    // The ON clause search condition, if one was given.
    private ValueNode joinClause;
    // The column list of a USING clause, if one was given.
    private ResultColumnList usingClause;
    //User provided optimizer overrides
    private Properties joinOrderStrategyProperties;
    /**
     * Initializer for a JoinNode.
     *
     * @param leftResult The ResultSetNode on the left side of this join
     * @param rightResult The ResultSetNode on the right side of this join
     * @param onClause The ON clause
     * @param usingClause The USING clause
     * @param selectList The result column list for the join
     * @param tableProperties Properties list associated with the table
     * @param joinOrderStrategyProperties User provided optimizer overrides
     *
     * @exception StandardException Thrown on error
     */
    public void init(Object leftResult,
                     Object rightResult,
                     Object onClause,
                     Object usingClause,
                     Object selectList,
                     Object tableProperties,
                     Object joinOrderStrategyProperties)
            throws StandardException {
        // Left/right children and table properties are wired up by the superclass.
        super.init(leftResult, rightResult, tableProperties);
        // resultColumns is an inherited field; for INSERT - SELECT it names the
        // columns being inserted into or updated.
        resultColumns = (ResultColumnList)selectList;
        joinClause = (ValueNode)onClause;
        this.usingClause = (ResultColumnList)usingClause;
        this.joinOrderStrategyProperties = (Properties)joinOrderStrategyProperties;
    }
/**
* Fill this node with a deep copy of the given node.
*/
public void copyFrom(QueryTreeNode node) throws StandardException {
super.copyFrom(node);
JoinNode other = (JoinNode)node;
this.naturalJoin = other.naturalJoin;
this.joinClause = (ValueNode)getNodeFactory().copyNode(other.joinClause,
getParserContext());
this.usingClause = (ResultColumnList)getNodeFactory().copyNode(other.usingClause,
getParserContext());
this.joinOrderStrategyProperties = other.joinOrderStrategyProperties; // TODO: Clone?
}
/**
* Convert the joinType to a string.
*
* @param joinType The joinType as an enum.
*
* @return String The joinType as a String.
*/
public static String joinTypeToString(JoinType joinType) {
switch(joinType) {
case INNER:
return "INNER JOIN";
case CROSS:
return "CROSS JOIN";
case LEFT_OUTER:
return "LEFT OUTER JOIN";
case RIGHT_OUTER:
return "RIGHT OUTER JOIN";
case FULL_OUTER:
return "FULL OUTER JOIN";
case UNION:
return "UNION JOIN";
default:
assert false : "Unexpected joinType";
return null;
}
}
public ValueNode getJoinClause() {
return joinClause;
}
public void setJoinClause(ValueNode joinClause) {
this.joinClause = joinClause;
}
public ResultColumnList getUsingClause() {
return usingClause;
}
public void setUsingClause(ResultColumnList usingClause) {
this.usingClause = usingClause;
}
/**
* Convert this object to a String. See comments in QueryTreeNode.java
* for how this should be done for tree printing.
*
* @return This object as a String
*/
public String toString() {
String str = super.toString();
if (naturalJoin)
str = "naturalJoin: " + naturalJoin + "\n" + str;
if (joinOrderStrategyProperties != null)
str = "joinOrderStrategyProperties: " + joinOrderStrategyProperties + "\n" + str;
return str;
}
/**
* Prints the sub-nodes of this object. See QueryTreeNode.java for
* how tree printing is supposed to work.
*
* @param depth The depth of this node in the tree
*/
public void printSubNodes(int depth) {
super.printSubNodes(depth);
if (joinClause != null) {
printLabel(depth, "joinClause: ");
joinClause.treePrint(depth + 1);
}
if (usingClause != null) {
printLabel(depth, "usingClause: ");
usingClause.treePrint(depth + 1);
}
}
/**
* Flag this as a natural join so that an implicit USING clause will
* be generated in the bind phase.
*/
void setNaturalJoin() {
naturalJoin = true;
}
/** Is this a natural join? */
public boolean isNaturalJoin() {
return naturalJoin;
}
/**
* Return the logical left result set for this qualified
* join node.
* (For RIGHT OUTER JOIN, the left is the right
* and the right is the left and the JOIN is the NIOJ).
*/
public ResultSetNode getLogicalLeftResultSet() {
return leftResultSet;
}
/**
* Return the logical right result set for this qualified
* join node.
* (For RIGHT OUTER JOIN, the left is the right
* and the right is the left and the JOIN is the NIOJ).
*/
public ResultSetNode getLogicalRightResultSet() {
return rightResultSet;
}
/**
* Accept the visitor for all visitable children of this node.
*
* @param v the visitor
*
* @exception StandardException on error
*/
void acceptChildren(Visitor v) throws StandardException {
super.acceptChildren(v);
if (resultColumns != null) {
resultColumns = (ResultColumnList)resultColumns.accept(v);
}
if (joinClause != null) {
joinClause = (ValueNode)joinClause.accept(v);
}
if (usingClause != null) {
usingClause = (ResultColumnList)usingClause.accept(v);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.ql.exec.DependencyCollectionTask;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.UnionOperator;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.TableDesc;
/**
* Processor Context for creating map reduce task. Walk the tree in a DFS manner
* and process the nodes. Some state is maintained about the current nodes
* visited so far.
*/
public class GenMRProcContext implements NodeProcessorCtx {
/**
* GenMapRedCtx is used to keep track of the current state.
*/
public static class GenMapRedCtx {
Task<?> currTask;
String currAliasId;
public GenMapRedCtx() {
}
/**
* @param currTask
* the current task
* @param currAliasId
*/
public GenMapRedCtx(Task<?> currTask, String currAliasId) {
this.currTask = currTask;
this.currAliasId = currAliasId;
}
/**
* @return current task
*/
public Task<?> getCurrTask() {
return currTask;
}
/**
* @return current alias
*/
public String getCurrAliasId() {
return currAliasId;
}
}
/**
* GenMRUnionCtx.
*
*/
public static class GenMRUnionCtx {
final Task<?> uTask;
List<String> taskTmpDir;
List<TableDesc> tt_desc;
List<TableScanOperator> listTopOperators;
public GenMRUnionCtx(Task<?> uTask) {
this.uTask = uTask;
taskTmpDir = new ArrayList<String>();
tt_desc = new ArrayList<TableDesc>();
listTopOperators = new ArrayList<>();
}
public Task<?> getUTask() {
return uTask;
}
public void addTaskTmpDir(String taskTmpDir) {
this.taskTmpDir.add(taskTmpDir);
}
public List<String> getTaskTmpDir() {
return taskTmpDir;
}
public void addTTDesc(TableDesc tt_desc) {
this.tt_desc.add(tt_desc);
}
public List<TableDesc> getTTDesc() {
return tt_desc;
}
public List<TableScanOperator> getListTopOperators() {
return listTopOperators;
}
public void addListTopOperators(TableScanOperator topOperator) {
listTopOperators.add(topOperator);
}
}
private HiveConf conf;
private
HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap;
private
HashMap<Task<?>, List<Operator<? extends OperatorDesc>>> taskToSeenOps;
private HashMap<UnionOperator, GenMRUnionCtx> unionTaskMap;
private List<FileSinkOperator> seenFileSinkOps;
private ParseContext parseCtx;
private List<Task<MoveWork>> mvTask;
private List<Task<?>> rootTasks;
private LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx;
private Task<?> currTask;
private TableScanOperator currTopOp;
private UnionOperator currUnionOp;
private String currAliasId;
private DependencyCollectionTask dependencyTaskForMultiInsert;
// If many fileSinkDescs are linked to each other, it is a good idea to keep track of
// tasks for first fileSinkDesc. others can use it
private Map<FileSinkDesc, Task<?>> linkedFileDescTasks;
/**
* Set of read entities. This list is generated by the walker and is passed to
* the hooks.
*/
private Set<ReadEntity> inputs;
/**
* Set of write entities. This list is generated by the walker and is passed
* to the hooks.
*/
private Set<WriteEntity> outputs;
public GenMRProcContext() {
}
/**
* @param conf
* hive configuration
* @param opTaskMap
* reducer to task mapping
* @param parseCtx
* current parse context
* @param rootTasks
* root tasks for the plan
* @param mvTask
* the final move task
* @param mapCurrCtx
* operator to task mappings
* @param inputs
* the set of input tables/partitions generated by the walk
* @param outputs
* the set of destinations generated by the walk
*/
public GenMRProcContext(
HiveConf conf,
HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap,
ParseContext parseCtx,
List<Task<MoveWork>> mvTask,
List<Task<?>> rootTasks,
LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx,
Set<ReadEntity> inputs, Set<WriteEntity> outputs) {
this.conf = conf;
this.opTaskMap = opTaskMap;
this.mvTask = mvTask;
this.parseCtx = parseCtx;
this.rootTasks = rootTasks;
this.mapCurrCtx = mapCurrCtx;
this.inputs = inputs;
this.outputs = outputs;
currTask = null;
currTopOp = null;
currUnionOp = null;
currAliasId = null;
unionTaskMap = new HashMap<UnionOperator, GenMRUnionCtx>();
taskToSeenOps = new HashMap<Task<?>,
List<Operator<? extends OperatorDesc>>>();
dependencyTaskForMultiInsert = null;
linkedFileDescTasks = null;
}
/**
* The context is reused across the rules. Reset so the following info is not
* incorrectly carried over to the following optimizations starting with the new TS.
*/
public void reset() {
currTask = null;
currTopOp = null;
currUnionOp = null;
currAliasId = null;
}
/**
* @return reducer to task mapping
*/
public HashMap<Operator<? extends OperatorDesc>,
Task<?>> getOpTaskMap() {
return opTaskMap;
}
/**
* @param opTaskMap
* reducer to task mapping
*/
public void setOpTaskMap(
HashMap<Operator<? extends OperatorDesc>, Task<?>> opTaskMap) {
this.opTaskMap = opTaskMap;
}
public boolean isSeenOp(Task task, Operator operator) {
List<Operator<?extends OperatorDesc>> seenOps = taskToSeenOps.get(task);
return seenOps != null && seenOps.contains(operator);
}
public void addSeenOp(Task task, Operator operator) {
List<Operator<?extends OperatorDesc>> seenOps = taskToSeenOps.get(task);
if (seenOps == null) {
taskToSeenOps.put(task, seenOps = new ArrayList<Operator<? extends OperatorDesc>>());
}
seenOps.add(operator);
}
/**
* @return file operators already visited
*/
public List<FileSinkOperator> getSeenFileSinkOps() {
return seenFileSinkOps;
}
/**
* @param seenFileSinkOps
* file sink operators already visited
*/
public void setSeenFileSinkOps(List<FileSinkOperator> seenFileSinkOps) {
this.seenFileSinkOps = seenFileSinkOps;
}
/**
* @return current parse context
*/
public ParseContext getParseCtx() {
return parseCtx;
}
/**
* @param parseCtx
* current parse context
*/
public void setParseCtx(ParseContext parseCtx) {
this.parseCtx = parseCtx;
}
/**
* @return the final move task
*/
public List<Task<MoveWork>> getMvTask() {
return mvTask;
}
/**
* @param mvTask
* the final move task
*/
public void setMvTask(List<Task<MoveWork>> mvTask) {
this.mvTask = mvTask;
}
/**
* @return root tasks for the plan
*/
public List<Task<?>> getRootTasks() {
return rootTasks;
}
/**
* @param rootTasks
* root tasks for the plan
*/
public void setRootTasks(List<Task<?>> rootTasks) {
this.rootTasks = rootTasks;
}
public boolean addRootIfPossible(Task<?> task) {
if (task.getParentTasks() == null || task.getParentTasks().isEmpty()) {
if (!rootTasks.contains(task)) {
return rootTasks.add(task);
}
}
return false;
}
/**
* @return operator to task mappings
*/
public LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> getMapCurrCtx() {
return mapCurrCtx;
}
/**
* @param mapCurrCtx
* operator to task mappings
*/
public void setMapCurrCtx(
LinkedHashMap<Operator<? extends OperatorDesc>, GenMapRedCtx> mapCurrCtx) {
this.mapCurrCtx = mapCurrCtx;
}
/**
* @return current task
*/
public Task<?> getCurrTask() {
return currTask;
}
/**
* @param currTask
* current task
*/
public void setCurrTask(Task<?> currTask) {
this.currTask = currTask;
}
/**
* @return current top operator
*/
public TableScanOperator getCurrTopOp() {
return currTopOp;
}
/**
* @param currTopOp
* current top operator
*/
public void setCurrTopOp(TableScanOperator currTopOp) {
this.currTopOp = currTopOp;
}
public UnionOperator getCurrUnionOp() {
return currUnionOp;
}
/**
* @param currUnionOp
* current union operator
*/
public void setCurrUnionOp(UnionOperator currUnionOp) {
this.currUnionOp = currUnionOp;
}
/**
* @return current top alias
*/
public String getCurrAliasId() {
return currAliasId;
}
/**
* @param currAliasId
* current top alias
*/
public void setCurrAliasId(String currAliasId) {
this.currAliasId = currAliasId;
}
public GenMRUnionCtx getUnionTask(UnionOperator op) {
return unionTaskMap.get(op);
}
public void setUnionTask(UnionOperator op, GenMRUnionCtx uTask) {
unionTaskMap.put(op, uTask);
}
/**
* Get the input set.
*/
public Set<ReadEntity> getInputs() {
return inputs;
}
/**
* Get the output set.
*/
public Set<WriteEntity> getOutputs() {
return outputs;
}
/**
* @return the conf
*/
public HiveConf getConf() {
return conf;
}
/**
* @param conf
* the conf to set
*/
public void setConf(HiveConf conf) {
this.conf = conf;
}
/**
* Returns dependencyTaskForMultiInsert initializing it if necessary.
*
* dependencyTaskForMultiInsert serves as a mutual dependency for the final move tasks in a
* multi-insert query.
*
* @return
*/
public DependencyCollectionTask getDependencyTaskForMultiInsert() {
if (dependencyTaskForMultiInsert == null) {
if (conf.getBoolVar(ConfVars.HIVE_MULTI_INSERT_MOVE_TASKS_SHARE_DEPENDENCIES)) {
dependencyTaskForMultiInsert =
(DependencyCollectionTask) TaskFactory.get(new DependencyCollectionWork());
}
}
return dependencyTaskForMultiInsert;
}
public Map<FileSinkDesc, Task<?>> getLinkedFileDescTasks() {
return linkedFileDescTasks;
}
public void setLinkedFileDescTasks(
Map<FileSinkDesc, Task<?>> linkedFileDescTasks) {
this.linkedFileDescTasks = linkedFileDescTasks;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.apache.kafka.clients.consumer.internals;
import org.apache.kafka.clients.ClientResponse;
import org.apache.kafka.clients.consumer.CommitType;
import org.apache.kafka.clients.consumer.ConsumerCommitCallback;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.Node;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.DisconnectException;
import org.apache.kafka.common.metrics.Measurable;
import org.apache.kafka.common.metrics.MetricConfig;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Count;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.metrics.stats.Rate;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.requests.ConsumerMetadataRequest;
import org.apache.kafka.common.requests.ConsumerMetadataResponse;
import org.apache.kafka.common.requests.HeartbeatRequest;
import org.apache.kafka.common.requests.HeartbeatResponse;
import org.apache.kafka.common.requests.JoinGroupRequest;
import org.apache.kafka.common.requests.JoinGroupResponse;
import org.apache.kafka.common.requests.OffsetCommitRequest;
import org.apache.kafka.common.requests.OffsetCommitResponse;
import org.apache.kafka.common.requests.OffsetFetchRequest;
import org.apache.kafka.common.requests.OffsetFetchResponse;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* This class manages the coordination process with the consumer coordinator.
*/
public final class Coordinator {
private static final Logger log = LoggerFactory.getLogger(Coordinator.class);
private final ConsumerNetworkClient client;
private final Time time;
private final String groupId;
private final Heartbeat heartbeat;
private final HeartbeatTask heartbeatTask;
private final int sessionTimeoutMs;
private final String assignmentStrategy;
private final SubscriptionState subscriptions;
private final CoordinatorMetrics sensors;
private final long requestTimeoutMs;
private final long retryBackoffMs;
private final RebalanceCallback rebalanceCallback;
private Node consumerCoordinator;
private String consumerId;
private int generation;
/**
 * Initialize the coordination manager. The consumer id starts out as
 * UNKNOWN_CONSUMER_ID and the generation as -1; both are replaced by the
 * values returned by the coordinator on the first successful JoinGroup.
 */
public Coordinator(ConsumerNetworkClient client,
                   String groupId,
                   int sessionTimeoutMs,
                   int heartbeatIntervalMs,
                   String assignmentStrategy,
                   SubscriptionState subscriptions,
                   Metrics metrics,
                   String metricGrpPrefix,
                   Map<String, String> metricTags,
                   Time time,
                   long requestTimeoutMs,
                   long retryBackoffMs,
                   RebalanceCallback rebalanceCallback) {
    this.client = client;
    this.time = time;
    this.generation = -1;
    this.consumerId = JoinGroupRequest.UNKNOWN_CONSUMER_ID;
    this.groupId = groupId;
    // the coordinator is discovered lazily via ensureCoordinatorKnown()
    this.consumerCoordinator = null;
    this.subscriptions = subscriptions;
    this.sessionTimeoutMs = sessionTimeoutMs;
    this.assignmentStrategy = assignmentStrategy;
    this.heartbeat = new Heartbeat(this.sessionTimeoutMs, heartbeatIntervalMs, time.milliseconds());
    this.heartbeatTask = new HeartbeatTask();
    this.sensors = new CoordinatorMetrics(metrics, metricGrpPrefix, metricTags);
    this.requestTimeoutMs = requestTimeoutMs;
    this.retryBackoffMs = retryBackoffMs;
    this.rebalanceCallback = rebalanceCallback;
}
/**
 * If a commit refresh is pending, fetch the latest committed offsets for the
 * currently assigned partitions from the coordinator and update the local
 * subscription state with them.
 */
public void refreshCommittedOffsetsIfNeeded() {
    if (!subscriptions.refreshCommitsNeeded())
        return;
    Map<TopicPartition, Long> committed = fetchCommittedOffsets(subscriptions.assignedPartitions());
    for (Map.Entry<TopicPartition, Long> committedEntry : committed.entrySet()) {
        TopicPartition partition = committedEntry.getKey();
        // only record the offset if the partition is still assigned to us
        if (subscriptions.isAssigned(partition))
            this.subscriptions.committed(partition, committedEntry.getValue());
    }
    this.subscriptions.commitsRefreshed();
}
/**
 * Fetch the current committed offsets from the coordinator for a set of partitions.
 * Blocks until the offsets are obtained: retriable failures are retried after a
 * backoff, non-retriable failures are thrown to the caller.
 *
 * @param partitions The partitions to fetch offsets for
 * @return A map from partition to the committed offset
 */
public Map<TopicPartition, Long> fetchCommittedOffsets(Set<TopicPartition> partitions) {
    while (true) {
        // both a known coordinator and a valid partition assignment are
        // required before an offset fetch can be issued
        ensureCoordinatorKnown();
        ensurePartitionAssignment();

        // contact coordinator to fetch committed offsets
        RequestFuture<Map<TopicPartition, Long>> future = sendOffsetFetchRequest(partitions);
        client.poll(future);

        if (future.succeeded())
            return future.value();

        // propagate non-retriable failures to the caller
        if (!future.isRetriable())
            throw future.exception();

        // back off before retrying a retriable failure
        Utils.sleep(retryBackoffMs);
    }
}
/**
 * Ensure that we have a valid partition assignment from the coordinator.
 * Invokes the user's rebalance callback around the reassignment: the old
 * partitions are revoked before, and the new assignment is announced after.
 * Exceptions thrown by the user callback are logged but do not abort the
 * rebalance itself.
 */
public void ensurePartitionAssignment() {
    if (!subscriptions.partitionAssignmentNeeded())
        return;

    // execute the user's callback before rebalance
    log.debug("Revoking previously assigned partitions {}", this.subscriptions.assignedPartitions());
    try {
        // hand the callback a copy so it cannot mutate our internal state
        Set<TopicPartition> revoked = new HashSet<TopicPartition>(subscriptions.assignedPartitions());
        rebalanceCallback.onPartitionsRevoked(revoked);
    } catch (Exception e) {
        // a user callback failure must not prevent the rebalance
        log.error("User provided callback " + this.rebalanceCallback.getClass().getName()
                + " failed on partition revocation: ", e);
    }

    reassignPartitions();

    // execute the user's callback after rebalance
    log.debug("Setting newly assigned partitions {}", this.subscriptions.assignedPartitions());
    try {
        Set<TopicPartition> assigned = new HashSet<TopicPartition>(subscriptions.assignedPartitions());
        rebalanceCallback.onPartitionsAssigned(assigned);
    } catch (Exception e) {
        log.error("User provided callback " + this.rebalanceCallback.getClass().getName()
                + " failed on partition assignment: ", e);
    }
}
/**
 * Send JoinGroup requests until a new partition assignment has been received,
 * backing off on retriable failures and rethrowing non-retriable ones.
 */
private void reassignPartitions() {
    while (subscriptions.partitionAssignmentNeeded()) {
        ensureCoordinatorKnown();

        // ensure that there are no pending requests to the coordinator. This is important
        // in particular to avoid resending a pending JoinGroup request.
        if (client.pendingRequestCount(this.consumerCoordinator) > 0) {
            client.awaitPendingRequests(this.consumerCoordinator);
            continue;
        }

        RequestFuture<Void> future = sendJoinGroupRequest();
        client.poll(future);

        if (future.failed()) {
            // non-retriable errors (e.g. invalid session timeout) are fatal
            if (!future.isRetriable())
                throw future.exception();
            Utils.sleep(retryBackoffMs);
        }
    }
}
/**
 * Block until the coordinator for this group has been discovered.
 */
public void ensureCoordinatorKnown() {
    while (coordinatorUnknown()) {
        RequestFuture<Void> discoveryFuture = sendConsumerMetadataRequest();
        client.poll(discoveryFuture, requestTimeoutMs);
        // if discovery failed, wait for fresh cluster metadata before retrying
        if (discoveryFuture.failed())
            client.awaitMetadataUpdate();
    }
}
/**
 * Commit offsets. This call blocks (regardless of commitType) until the coordinator
 * can receive the commit request. Once the request has been made, however, only the
 * synchronous commits will wait for a successful response from the coordinator.
 *
 * @param offsets Offsets to commit.
 * @param commitType Commit policy
 * @param callback Callback to be executed when the commit request finishes
 */
public void commitOffsets(Map<TopicPartition, Long> offsets, CommitType commitType, ConsumerCommitCallback callback) {
    // dispatch to the async path only for an explicit ASYNC commit type
    if (commitType != CommitType.ASYNC) {
        commitOffsetsSync(offsets, callback);
    } else {
        commitOffsetsAsync(offsets, callback);
    }
}
/**
 * Delayed task which sends periodic heartbeats to the coordinator while the
 * group is stable, and reschedules itself for the next heartbeat deadline.
 */
private class HeartbeatTask implements DelayedTask {

    /** Start or restart the heartbeat task so it runs at the next chance. */
    public void reset() {
        // start or restart the heartbeat task to be executed at the next chance
        long now = time.milliseconds();
        heartbeat.resetSessionTimeout(now);
        client.unschedule(this);
        client.schedule(this, now);
    }

    @Override
    public void run(final long now) {
        if (!subscriptions.partitionsAutoAssigned() ||
                subscriptions.partitionAssignmentNeeded() ||
                coordinatorUnknown())
            // no need to send if we're not using auto-assignment or if we are
            // awaiting a rebalance
            return;

        if (heartbeat.sessionTimeoutExpired(now)) {
            // we haven't received a successful heartbeat in one session interval
            // so mark the coordinator dead
            coordinatorDead();
            return;
        }

        if (!heartbeat.shouldHeartbeat(now)) {
            // we don't need to heartbeat now, so reschedule for when we do
            client.schedule(this, now + heartbeat.timeToNextHeartbeat(now));
        } else {
            heartbeat.sentHeartbeat(now);
            RequestFuture<Void> future = sendHeartbeatRequest();
            future.addListener(new RequestFutureListener<Void>() {
                @Override
                public void onSuccess(Void value) {
                    // heartbeat succeeded: record it and schedule the next one
                    long now = time.milliseconds();
                    heartbeat.receiveHeartbeat(now);
                    long nextHeartbeatTime = now + heartbeat.timeToNextHeartbeat(now);
                    client.schedule(HeartbeatTask.this, nextHeartbeatTime);
                }

                @Override
                public void onFailure(RuntimeException e) {
                    // heartbeat failed: retry after the backoff interval
                    client.schedule(HeartbeatTask.this, time.milliseconds() + retryBackoffMs);
                }
            });
        }
    }
}
/**
 * Send a request to get a new partition assignment. This is a non-blocking call which sends
 * a JoinGroup request to the coordinator (if it is available). The returned future must
 * be polled to see if the request completed successfully.
 * @return A request future whose completion indicates the result of the JoinGroup request.
 */
private RequestFuture<Void> sendJoinGroupRequest() {
    if (coordinatorUnknown())
        return RequestFuture.coordinatorNotAvailable();

    // send a join group request to the coordinator
    List<String> subscribedTopics = new ArrayList<String>(subscriptions.subscribedTopics());
    log.debug("(Re-)joining group {} with subscribed topics {}", groupId, subscribedTopics);

    // the current consumerId is included so the coordinator can recognize a
    // rejoin; it is UNKNOWN_CONSUMER_ID on the first attempt
    JoinGroupRequest request = new JoinGroupRequest(groupId,
            this.sessionTimeoutMs,
            subscribedTopics,
            this.consumerId,
            this.assignmentStrategy);

    // create the request for the coordinator
    log.debug("Issuing request ({}: {}) to coordinator {}", ApiKeys.JOIN_GROUP, request, this.consumerCoordinator.id());
    return client.send(consumerCoordinator, ApiKeys.JOIN_GROUP, request)
            .compose(new JoinGroupResponseHandler());
}
/**
 * Handles a JoinGroup response: on success records the assigned consumer id,
 * generation and partitions and restarts the heartbeat task; on known error
 * codes resets the relevant state so the caller can retry appropriately.
 */
private class JoinGroupResponseHandler extends CoordinatorResponseHandler<JoinGroupResponse, Void> {

    @Override
    public JoinGroupResponse parse(ClientResponse response) {
        return new JoinGroupResponse(response.responseBody());
    }

    @Override
    public void handle(JoinGroupResponse joinResponse, RequestFuture<Void> future) {
        // process the response
        short errorCode = joinResponse.errorCode();

        if (errorCode == Errors.NONE.code()) {
            Coordinator.this.consumerId = joinResponse.consumerId();
            Coordinator.this.generation = joinResponse.generationId();

            // set the flag to refresh last committed offsets
            subscriptions.needRefreshCommits();

            log.debug("Joined group: {}", joinResponse.toStruct());

            // record re-assignment time
            sensors.partitionReassignments.record(response.requestLatencyMs());

            // update partition assignment
            subscriptions.changePartitionAssignment(joinResponse.assignedPartitions());
            heartbeatTask.reset();
            future.complete(null);
        } else if (errorCode == Errors.UNKNOWN_CONSUMER_ID.code()) {
            // reset the consumer id and retry immediately
            Coordinator.this.consumerId = JoinGroupRequest.UNKNOWN_CONSUMER_ID;
            log.info("Attempt to join group {} failed due to unknown consumer id, resetting and retrying.",
                    groupId);
            future.raise(Errors.UNKNOWN_CONSUMER_ID);
        } else if (errorCode == Errors.CONSUMER_COORDINATOR_NOT_AVAILABLE.code()
                || errorCode == Errors.NOT_COORDINATOR_FOR_CONSUMER.code()) {
            // re-discover the coordinator and retry with backoff
            coordinatorDead();
            log.info("Attempt to join group {} failed due to obsolete coordinator information, retrying.",
                    groupId);
            future.raise(Errors.forCode(errorCode));
        } else if (errorCode == Errors.UNKNOWN_PARTITION_ASSIGNMENT_STRATEGY.code()
                || errorCode == Errors.INCONSISTENT_PARTITION_ASSIGNMENT_STRATEGY.code()
                || errorCode == Errors.INVALID_SESSION_TIMEOUT.code()) {
            // log the error and re-throw the exception
            Errors error = Errors.forCode(errorCode);
            log.error("Attempt to join group {} failed due to: {}",
                    groupId, error.exception().getMessage());
            future.raise(error);
        } else {
            // unexpected error, throw the exception
            future.raise(new KafkaException("Unexpected error in join group response: "
                    + Errors.forCode(joinResponse.errorCode()).exception().getMessage()));
        }
    }
}
/**
 * Send an offset commit request without waiting for the response. If a
 * callback is supplied, it is invoked with the result once the request
 * completes.
 */
private void commitOffsetsAsync(final Map<TopicPartition, Long> offsets, final ConsumerCommitCallback callback) {
    // committed offsets must be re-fetched on the next lookup
    this.subscriptions.needRefreshCommits();
    RequestFuture<Void> commitFuture = sendOffsetCommitRequest(offsets);
    if (callback == null)
        return;
    commitFuture.addListener(new RequestFutureListener<Void>() {
        @Override
        public void onSuccess(Void value) {
            callback.onComplete(offsets, null);
        }

        @Override
        public void onFailure(RuntimeException e) {
            callback.onComplete(offsets, e);
        }
    });
}
/**
 * Commit offsets synchronously: blocks until the commit succeeds or fails
 * with a non-retriable error. Retriable failures are retried after a backoff.
 * Results (including non-retriable exceptions) are delivered to the callback
 * if one is given; otherwise non-retriable exceptions are thrown.
 */
private void commitOffsetsSync(Map<TopicPartition, Long> offsets, ConsumerCommitCallback callback) {
    while (true) {
        ensureCoordinatorKnown();
        ensurePartitionAssignment();

        RequestFuture<Void> future = sendOffsetCommitRequest(offsets);
        client.poll(future);

        if (future.succeeded()) {
            if (callback != null)
                callback.onComplete(offsets, null);
            return;
        }

        if (!future.isRetriable()) {
            // non-retriable: surface via the callback, or throw if there is none
            if (callback == null)
                throw future.exception();
            else
                callback.onComplete(offsets, future.exception());
            return;
        }

        Utils.sleep(retryBackoffMs);
    }
}
/**
 * Commit offsets for the specified list of topics and partitions. This is a non-blocking call
 * which returns a request future that can be polled in the case of a synchronous commit or ignored in the
 * asynchronous case.
 *
 * @param offsets The list of offsets per partition that should be committed.
 * @return A request future whose value indicates whether the commit was successful or not
 */
private RequestFuture<Void> sendOffsetCommitRequest(final Map<TopicPartition, Long> offsets) {
    if (coordinatorUnknown())
        return RequestFuture.coordinatorNotAvailable();

    // committing nothing succeeds immediately without a round trip
    if (offsets.isEmpty())
        return RequestFuture.voidSuccess();

    // create the offset commit request; per-partition metadata is left empty
    Map<TopicPartition, OffsetCommitRequest.PartitionData> offsetData;
    offsetData = new HashMap<TopicPartition, OffsetCommitRequest.PartitionData>(offsets.size());
    for (Map.Entry<TopicPartition, Long> entry : offsets.entrySet())
        offsetData.put(entry.getKey(), new OffsetCommitRequest.PartitionData(entry.getValue(), ""));

    OffsetCommitRequest req = new OffsetCommitRequest(this.groupId,
            this.generation,
            this.consumerId,
            OffsetCommitRequest.DEFAULT_RETENTION_TIME,
            offsetData);

    return client.send(consumerCoordinator, ApiKeys.OFFSET_COMMIT, req)
            .compose(new OffsetCommitResponseHandler(offsets));
}
/**
 * Handles the per-partition error codes in an OffsetCommit response. On
 * success the local committed-offset cache is updated; coordinator and
 * generation errors fail the future so the caller can recover and retry,
 * while size-related errors are only logged.
 */
private class OffsetCommitResponseHandler extends CoordinatorResponseHandler<OffsetCommitResponse, Void> {

    /** The offsets that were sent in the commit request, keyed by partition. */
    private final Map<TopicPartition, Long> offsets;

    public OffsetCommitResponseHandler(Map<TopicPartition, Long> offsets) {
        this.offsets = offsets;
    }

    @Override
    public OffsetCommitResponse parse(ClientResponse response) {
        return new OffsetCommitResponse(response.responseBody());
    }

    @Override
    public void handle(OffsetCommitResponse commitResponse, RequestFuture<Void> future) {
        sensors.commitLatency.record(response.requestLatencyMs());
        for (Map.Entry<TopicPartition, Short> entry : commitResponse.responseData().entrySet()) {
            TopicPartition tp = entry.getKey();
            long offset = this.offsets.get(tp);
            short errorCode = entry.getValue();
            if (errorCode == Errors.NONE.code()) {
                log.debug("Committed offset {} for partition {}", offset, tp);
                if (subscriptions.isAssigned(tp))
                    // update the local cache only if the partition is still assigned
                    subscriptions.committed(tp, offset);
            } else if (errorCode == Errors.CONSUMER_COORDINATOR_NOT_AVAILABLE.code()
                    || errorCode == Errors.NOT_COORDINATOR_FOR_CONSUMER.code()) {
                // stale coordinator: mark it dead so it is re-discovered, then fail
                coordinatorDead();
                future.raise(Errors.forCode(errorCode));
                return;
            } else if (errorCode == Errors.OFFSET_METADATA_TOO_LARGE.code()
                    || errorCode == Errors.INVALID_COMMIT_OFFSET_SIZE.code()) {
                // do not need to throw the exception but just log the error
                log.error("Error committing partition {} at offset {}: {}",
                        tp,
                        offset,
                        Errors.forCode(errorCode).exception().getMessage());
            } else if (errorCode == Errors.UNKNOWN_CONSUMER_ID.code()
                    || errorCode == Errors.ILLEGAL_GENERATION.code()) {
                // need to re-join group
                subscriptions.needReassignment();
                future.raise(Errors.forCode(errorCode));
                return;
            } else {
                // do not need to throw the exception but just log the error
                future.raise(Errors.forCode(errorCode));
                log.error("Error committing partition {} at offset {}: {}",
                        tp,
                        offset,
                        Errors.forCode(errorCode).exception().getMessage());
            }
        }
        future.complete(null);
    }
}
/**
 * Fetch the committed offsets for a set of partitions. This is a non-blocking call. The
 * returned future can be polled to get the actual offsets returned from the broker.
 *
 * @param partitions The set of partitions to get offsets for.
 * @return A request future containing the committed offsets.
 */
private RequestFuture<Map<TopicPartition, Long>> sendOffsetFetchRequest(Set<TopicPartition> partitions) {
    if (coordinatorUnknown())
        return RequestFuture.coordinatorNotAvailable();

    log.debug("Fetching committed offsets for partitions: {}", Utils.join(partitions, ", "));
    // construct the request
    OffsetFetchRequest request = new OffsetFetchRequest(this.groupId, new ArrayList<TopicPartition>(partitions));

    // send the request with a callback
    return client.send(consumerCoordinator, ApiKeys.OFFSET_FETCH, request)
            .compose(new OffsetFetchResponseHandler());
}
/**
* Parses OffsetFetchResponses and maps per-partition error codes onto the
* request future. Any per-partition error aborts the entire fetch.
*/
private class OffsetFetchResponseHandler extends CoordinatorResponseHandler<OffsetFetchResponse, Map<TopicPartition, Long>> {
@Override
public OffsetFetchResponse parse(ClientResponse response) {
return new OffsetFetchResponse(response.responseBody());
}
@Override
public void handle(OffsetFetchResponse response, RequestFuture<Map<TopicPartition, Long>> future) {
Map<TopicPartition, Long> offsets = new HashMap<TopicPartition, Long>(response.responseData().size());
for (Map.Entry<TopicPartition, OffsetFetchResponse.PartitionData> entry : response.responseData().entrySet()) {
TopicPartition tp = entry.getKey();
OffsetFetchResponse.PartitionData data = entry.getValue();
if (data.hasError()) {
log.debug("Error fetching offset for topic-partition {}: {}", tp, Errors.forCode(data.errorCode)
.exception()
.getMessage());
if (data.errorCode == Errors.OFFSET_LOAD_IN_PROGRESS.code()) {
// just retry
future.raise(Errors.OFFSET_LOAD_IN_PROGRESS);
} else if (data.errorCode == Errors.NOT_COORDINATOR_FOR_CONSUMER.code()) {
// re-discover the coordinator and retry
coordinatorDead();
future.raise(Errors.NOT_COORDINATOR_FOR_CONSUMER);
} else if (data.errorCode == Errors.UNKNOWN_CONSUMER_ID.code()
|| data.errorCode == Errors.ILLEGAL_GENERATION.code()) {
// need to re-join group
subscriptions.needReassignment();
future.raise(Errors.forCode(data.errorCode));
} else {
future.raise(new KafkaException("Unexpected error in fetch offset response: "
+ Errors.forCode(data.errorCode).exception().getMessage()));
}
// any partition-level error fails the whole request
return;
} else if (data.offset >= 0) {
// record the position with the offset (-1 indicates no committed offset to fetch)
offsets.put(tp, data.offset);
} else {
log.debug("No committed offset for partition " + tp);
}
}
future.complete(offsets);
}
}
/**
* Send a heartbeat request now (visible only for testing).
*/
public RequestFuture<Void> sendHeartbeatRequest() {
// heartbeats carry the generation and consumer id so the coordinator can fence stale members
HeartbeatRequest req = new HeartbeatRequest(this.groupId, this.generation, this.consumerId);
return client.send(consumerCoordinator, ApiKeys.HEARTBEAT, req)
.compose(new HeartbeatCompletionHandler());
}
/**
 * @return true when no consumer group coordinator is currently known,
 *         i.e. it was never discovered or was marked dead.
 */
public boolean coordinatorUnknown() {
    return consumerCoordinator == null;
}
/**
* Discover the current coordinator for the consumer group. Sends a ConsumerMetadata request to
* one of the brokers. The returned future should be polled to get the result of the request.
* @return A request future which indicates the completion of the metadata request
*/
private RequestFuture<Void> sendConsumerMetadataRequest() {
// initiate the consumer metadata request
// find a node to ask about the coordinator
Node node = this.client.leastLoadedNode();
if (node == null) {
// TODO: If there are no brokers left, perhaps we should use the bootstrap set
// from configuration?
return RequestFuture.noBrokersAvailable();
} else {
// create a consumer metadata request
log.debug("Issuing consumer metadata request to broker {}", node.id());
ConsumerMetadataRequest metadataRequest = new ConsumerMetadataRequest(this.groupId);
return client.send(node, ApiKeys.CONSUMER_METADATA, metadataRequest)
.compose(new RequestFutureAdapter<ClientResponse, Void>() {
@Override
public void onSuccess(ClientResponse response, RequestFuture<Void> future) {
handleConsumerMetadataResponse(response, future);
}
});
}
}
// Process a ConsumerMetadata response: on success records the coordinator node
// (with a synthetic id) and resets the heartbeat task; on error raises the future.
private void handleConsumerMetadataResponse(ClientResponse resp, RequestFuture<Void> future) {
log.debug("Consumer metadata response {}", resp);
// parse the response to get the coordinator info if it is not disconnected,
// otherwise we need to request metadata update
if (resp.wasDisconnected()) {
future.raise(new DisconnectException());
} else if (!coordinatorUnknown()) {
// We already found the coordinator, so ignore the request
future.complete(null);
} else {
ConsumerMetadataResponse consumerMetadataResponse = new ConsumerMetadataResponse(resp.responseBody());
// use MAX_VALUE - node.id as the coordinator id to mimic separate connections
// for the coordinator in the underlying network client layer
// TODO: this needs to be better handled in KAFKA-1935
if (consumerMetadataResponse.errorCode() == Errors.NONE.code()) {
this.consumerCoordinator = new Node(Integer.MAX_VALUE - consumerMetadataResponse.node().id(),
consumerMetadataResponse.node().host(),
consumerMetadataResponse.node().port());
// restart heartbeating against the newly discovered coordinator
heartbeatTask.reset();
future.complete(null);
} else {
future.raise(Errors.forCode(consumerMetadataResponse.errorCode()));
}
}
}
/**
* Mark the current coordinator as dead.
* Clearing the reference makes coordinatorUnknown() true, which forces
* re-discovery before further coordinator requests are sent.
*/
private void coordinatorDead() {
if (this.consumerCoordinator != null) {
log.info("Marking the coordinator {} dead.", this.consumerCoordinator.id());
this.consumerCoordinator = null;
}
}
/**
* Parses heartbeat responses, recording latency and translating coordinator /
* generation / consumer-id errors into the appropriate recovery action.
*/
private class HeartbeatCompletionHandler extends CoordinatorResponseHandler<HeartbeatResponse, Void> {
@Override
public HeartbeatResponse parse(ClientResponse response) {
return new HeartbeatResponse(response.responseBody());
}
@Override
public void handle(HeartbeatResponse heartbeatResponse, RequestFuture<Void> future) {
// 'response' is the raw ClientResponse stashed by the base class before parse()
sensors.heartbeatLatency.record(response.requestLatencyMs());
short error = heartbeatResponse.errorCode();
if (error == Errors.NONE.code()) {
log.debug("Received successful heartbeat response.");
future.complete(null);
} else if (error == Errors.CONSUMER_COORDINATOR_NOT_AVAILABLE.code()
|| error == Errors.NOT_COORDINATOR_FOR_CONSUMER.code()) {
log.info("Attempt to heart beat failed since coordinator is either not started or not valid, marking it as dead.");
coordinatorDead();
future.raise(Errors.forCode(error));
} else if (error == Errors.ILLEGAL_GENERATION.code()) {
log.info("Attempt to heart beat failed since generation id is not legal, try to re-join group.");
subscriptions.needReassignment();
future.raise(Errors.ILLEGAL_GENERATION);
} else if (error == Errors.UNKNOWN_CONSUMER_ID.code()) {
log.info("Attempt to heart beat failed since consumer id is not valid, reset it and try to re-join group.");
// discard the stale id so the next join starts fresh
consumerId = JoinGroupRequest.UNKNOWN_CONSUMER_ID;
subscriptions.needReassignment();
future.raise(Errors.UNKNOWN_CONSUMER_ID);
} else {
future.raise(new KafkaException("Unexpected error in heartbeat response: "
+ Errors.forCode(error).exception().getMessage()));
}
}
}
/**
 * Base class for handlers of coordinator requests: stashes the raw response for
 * subclasses (e.g. latency metrics), translates disconnects into coordinator
 * re-discovery, and delegates parsing/handling of successful responses.
 */
private abstract class CoordinatorResponseHandler<R, T>
        extends RequestFutureAdapter<ClientResponse, T> {
    /** Raw response saved before parsing, readable by subclasses. */
    protected ClientResponse response;

    /** Deserialize the raw network response into the typed response R. */
    public abstract R parse(ClientResponse response);

    /** Handle the parsed response, completing or raising the given future. */
    public abstract void handle(R response, RequestFuture<T> future);

    @Override
    public void onSuccess(ClientResponse clientResponse, RequestFuture<T> future) {
        this.response = clientResponse;
        if (clientResponse.wasDisconnected()) {
            // use the parameter directly rather than re-reading the field
            int correlation = clientResponse.request().request().header().correlationId();
            log.debug("Cancelled request {} with correlation id {} due to coordinator {} being disconnected",
                    clientResponse.request(),
                    correlation,
                    clientResponse.request().request().destination());
            // mark the coordinator as dead so it is re-discovered
            coordinatorDead();
            future.raise(new DisconnectException());
            return;
        }
        // renamed local (was 'response') to avoid shadowing the field above
        R parsedResponse = parse(clientResponse);
        handle(parsedResponse, future);
    }

    @Override
    public void onFailure(RuntimeException e, RequestFuture<T> future) {
        // a disconnect implies the coordinator connection itself may be gone
        if (e instanceof DisconnectException) {
            log.debug("Coordinator request failed", e);
            coordinatorDead();
        }
        future.raise(e);
    }
}
/** Callback interface invoked around consumer group partition reassignment. */
public interface RebalanceCallback {
/** Called with the partitions newly assigned to this consumer. */
void onPartitionsAssigned(Collection<TopicPartition> partitions);
/** Called with the partitions about to be revoked from this consumer. */
void onPartitionsRevoked(Collection<TopicPartition> partitions);
}
/** Registers the coordinator-level sensors and gauges (commit/heartbeat/reassignment). */
private class CoordinatorMetrics {
    public final Metrics metrics;
    public final String metricGrpName;

    /** Latency and rate of offset commit requests. */
    public final Sensor commitLatency;
    /** Latency and rate of heartbeat requests. */
    public final Sensor heartbeatLatency;
    /** Latency and rate of partition reassignments. */
    public final Sensor partitionReassignments;

    public CoordinatorMetrics(Metrics metrics, String metricGrpPrefix, Map<String, String> tags) {
        this.metrics = metrics;
        this.metricGrpName = metricGrpPrefix + "-coordinator-metrics";

        this.commitLatency = metrics.sensor("commit-latency");
        this.commitLatency.add(new MetricName("commit-latency-avg",
                this.metricGrpName,
                "The average time taken for a commit request",
                tags), new Avg());
        this.commitLatency.add(new MetricName("commit-latency-max",
                this.metricGrpName,
                "The max time taken for a commit request",
                tags), new Max());
        this.commitLatency.add(new MetricName("commit-rate",
                this.metricGrpName,
                "The number of commit calls per second",
                tags), new Rate(new Count()));

        this.heartbeatLatency = metrics.sensor("heartbeat-latency");
        this.heartbeatLatency.add(new MetricName("heartbeat-response-time-max",
                this.metricGrpName,
                "The max time taken to receive a response to a hearbeat request",
                tags), new Max());
        this.heartbeatLatency.add(new MetricName("heartbeat-rate",
                this.metricGrpName,
                "The average number of heartbeats per second",
                tags), new Rate(new Count()));

        this.partitionReassignments = metrics.sensor("reassignment-latency");
        this.partitionReassignments.add(new MetricName("reassignment-time-avg",
                this.metricGrpName,
                "The average time taken for a partition reassignment",
                tags), new Avg());
        // BUG FIX: this "-max" metric previously registered an Avg() measurable
        // (copy-paste from the "-avg" metric above), so it reported the average
        // instead of the maximum. It must use Max(), matching commit-latency-max.
        this.partitionReassignments.add(new MetricName("reassignment-time-max",
                this.metricGrpName,
                "The max time taken for a partition reassignment",
                tags), new Max());
        this.partitionReassignments.add(new MetricName("reassignment-rate",
                this.metricGrpName,
                "The number of partition reassignments per second",
                tags), new Rate(new Count()));

        // gauge: current number of assigned partitions
        Measurable numParts =
            new Measurable() {
                public double measure(MetricConfig config, long now) {
                    return subscriptions.assignedPartitions().size();
                }
            };
        metrics.addMetric(new MetricName("assigned-partitions",
                this.metricGrpName,
                "The number of partitions currently assigned to this consumer",
                tags),
            numParts);

        // gauge: seconds elapsed since the last heartbeat was sent
        Measurable lastHeartbeat =
            new Measurable() {
                public double measure(MetricConfig config, long now) {
                    return TimeUnit.SECONDS.convert(now - heartbeat.lastHeartbeatSend(), TimeUnit.MILLISECONDS);
                }
            };
        metrics.addMetric(new MetricName("last-heartbeat-seconds-ago",
                this.metricGrpName,
                "The number of seconds since the last controller heartbeat",
                tags),
            lastHeartbeat);
    }
}
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2003 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.hasher;
import java.security.*;
import java.util.*;
import org.lockss.plugin.*;
import org.lockss.util.LockssSerializable;
/** Result of a single-block V3 hash, passed to the ContentHasher's
* HashBlockCallback. A block contains multiple versions, which can be
* iterated over or returned as an array. This array is sorted and iterated
* in the order <i>newest</i> to <i>oldest</i> version. */
public class HashBlock {
// URL of the CachedUrl this block was built from
String url;
// Versions of this block, ordered by Version.compareTo(): higher
// repositoryVersion sorts first, i.e. newest to oldest.
TreeSet versions;
// Running totals accumulated across all added versions
long totalFilteredBytes = 0;
long totalUnfilteredBytes = 0;
long totalHashedBytes = 0;
public HashBlock(CachedUrl cu) {
this.versions = new TreeSet();
this.url = cu.getUrl();
}
public String getUrl() {
return url;
}
public String toString() {
return "[HBlock: " + getUrl() + "]";
}
/**
* Add one hashed version of this block and update the byte totals.
* Note: bytesHashed is only accumulated into totalHashedBytes; it is
* not stored on the Version object itself.
*/
public void addVersion(long unfilteredOffset,
long unfilteredLength,
long filteredOffset,
long filteredLength,
long bytesHashed,
MessageDigest[] digests,
int repositoryVersion,
Throwable hashError) {
versions.add(new HashBlock.Version(unfilteredOffset, unfilteredLength,
filteredOffset, filteredLength,
digests, repositoryVersion,
hashError));
totalFilteredBytes += filteredLength;
totalUnfilteredBytes += unfilteredLength;
totalHashedBytes += bytesHashed;
}
public int size() {
return versions.size();
}
/**
* @param comparator the order to sort by.
* @return the versions, sorted by the comparator.
*/
Version[] sortedVersions(Comparator<HashBlock.Version> comparator) {
HashBlock.Version[] versions = getVersions();
Arrays.sort(versions, comparator);
return versions;
}
/**
* Count the partitions of the versions, relative to the comparator.
*
* @param comparator The comparator used to partition the versions.
* @return the number of unique values represented among the
* versions, as determined by the comparator.
*/
public int countUniqueVersions(Comparator<HashBlock.Version> comparator) {
HashBlock.Version[] versions = sortedVersions(comparator);
int count = 0;
if (versions.length > 0) {
count = 1;
// after sorting, a partition boundary is any adjacent unequal pair
for (int idx = 1; idx < versions.length; idx++) {
if (comparator.compare(versions[idx-1], versions[idx]) != 0) {
count++;
}
}
}
return count;
}
public long getTotalFilteredBytes() {
return totalFilteredBytes;
}
public long getTotalUnfilteredBytes() {
return totalUnfilteredBytes;
}
public long getTotalHashedBytes() {
return totalHashedBytes;
}
// Iterates newest-to-oldest (the TreeSet's natural order).
public Iterator versionIterator() {
return versions.iterator();
}
// Newest version (highest repositoryVersion), or null if none added.
public HashBlock.Version currentVersion() {
if (versions.size() > 0) {
return (HashBlock.Version)versions.first();
} else {
return null;
}
}
// Oldest version (lowest repositoryVersion), or null if none added.
public HashBlock.Version lastVersion() {
if (versions.size() > 0) {
return (HashBlock.Version)versions.last();
} else {
return null;
}
}
public HashBlock.Version[] getVersions() {
Version[] retVal =
(Version[])versions.toArray(new Version[versions.size()]);
return retVal;
}
/**
* Internal representation of a version of a hash block. Natural sort
* order is by repository version.
*
* NOTE(review): compareTo() orders only by repositoryVersion while
* equals() also compares offsets/lengths/flags, so compareTo is not
* consistent with equals; a TreeSet will drop a second version with
* the same repositoryVersion even if equals() would be false — confirm
* this is intended.
*/
public static class Version implements Comparable {
long filteredOffset;
long filteredLength;
long unfilteredOffset;
long unfilteredLength;
boolean endOfFile;
boolean lastVersion;
// one digest result per MessageDigest supplied to the constructor
byte[][] hashes;
int repositoryVersion;
// non-null if hashing this version failed
Throwable hashError;
public Version(long unfilteredOffset, long unfilteredLength,
long filteredOffset, long filteredLength,
MessageDigest[] digests, int repositoryVersion,
Throwable hashError) {
this.unfilteredOffset = unfilteredOffset;
this.unfilteredLength = unfilteredLength;
this.filteredOffset = filteredOffset;
this.filteredLength = filteredLength;
this.repositoryVersion = repositoryVersion;
this.hashError = hashError;
setDigests(digests);
}
public void setFilteredOffset(long offset) {
filteredOffset = offset;
}
public long getFilteredOffset() {
return filteredOffset;
}
public void setFilteredLength(long length) {
filteredLength = length;
}
public long getFilteredLength() {
return filteredLength;
}
public void setUnfilteredOffset(long offset) {
unfilteredOffset = offset;
}
public long getUnfilteredOffset() {
return unfilteredOffset;
}
public void setUnfilteredLength(long length) {
unfilteredLength = length;
}
public long getUnfilteredLength() {
return unfilteredLength;
}
// Captures each digest's final value; MessageDigest.digest() also
// resets the digest for further use.
public void setDigests(MessageDigest[] digests) {
int len = digests.length;
hashes = new byte[len][];
for (int i = 0; i < len; i++) {
hashes[i] = digests[i].digest();
}
}
public byte[][] getHashes() {
return hashes;
}
public void setHashError(Throwable t) {
hashError = t;
}
public Throwable getHashError() {
return hashError;
}
public void setEndOfFile(boolean val) {
endOfFile = val;
}
public boolean isEndOfFile() {
return endOfFile;
}
public void setLastVersion(boolean val) {
lastVersion = val;
}
public boolean isLastVersion() {
return lastVersion;
}
public boolean isWholeFile() {
return unfilteredOffset == 0 && endOfFile;
}
public int getRepositoryVersion() {
return repositoryVersion;
}
// This mess is necessary because BlockHasher may add versions in
// an arbitrary order, but we must be sure to iterate over them
// in a predictable order. The blocks are therefore held in a sorted
// set, ordered by repository version number, from most recent to least
// recent.
public boolean equals(Object o) {
if (!(o instanceof HashBlock.Version)) {
return false;
}
if (this == o) {
return true;
}
HashBlock.Version v = (HashBlock.Version)o;
return (this.endOfFile == v.endOfFile &&
this.lastVersion == v.lastVersion &&
this.filteredLength == v.filteredLength &&
this.filteredOffset == v.filteredOffset &&
this.unfilteredLength == v.unfilteredLength &&
this.unfilteredOffset == v.unfilteredOffset &&
this.repositoryVersion == v.repositoryVersion);
}
public int hashCode() {
int result = 17;
result = result * 37 + repositoryVersion;
result = result * 37 + (int)(unfilteredOffset ^ (unfilteredOffset >>> 32));
result = result * 37 + (int)(unfilteredLength ^ (unfilteredLength >>> 32));
result = result * 37 + (int)(filteredOffset ^ (filteredOffset >>> 32));
result = result * 37 + (int)(filteredLength ^ (filteredLength >>> 32));
result = result * 37 + (endOfFile ? 1 : 0);
result = result * 37 + (lastVersion ? 1 : 0);
return result;
}
// Orders higher repositoryVersion first (newest-to-oldest iteration).
public int compareTo(Object o) {
final int BEFORE = -1;
final int EQUAL = 0;
final int AFTER = 1;
if (!(o instanceof HashBlock.Version)) {
throw new ClassCastException();
}
if (this == o) {
return 0;
}
HashBlock.Version v = (HashBlock.Version)o;
if (this.repositoryVersion > v.repositoryVersion) {
return BEFORE;
}
if (this.repositoryVersion < v.repositoryVersion) {
return AFTER;
}
return EQUAL;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.util.*;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.RateLimiter;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.Memtable;
import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.ISSTableScanner;
import org.apache.cassandra.utils.JVMStabilityInspector;
/**
* Pluggable compaction strategy determines how SSTables get merged.
*
* There are two main goals:
* - perform background compaction constantly as needed; this typically makes a tradeoff between
* i/o done by compaction, and merging done at read time.
* - perform a full (maximum possible) compaction if requested by the user
*/
public abstract class AbstractCompactionStrategy
{
private static final Logger logger = LoggerFactory.getLogger(AbstractCompactionStrategy.class);
// Default droppable-tombstone ratio above which an sstable becomes a
// tombstone-compaction candidate (see worthDroppingTombstones).
protected static final float DEFAULT_TOMBSTONE_THRESHOLD = 0.2f;
// minimum interval needed to perform tombstone removal compaction in seconds, default 86400 or 1 day.
protected static final long DEFAULT_TOMBSTONE_COMPACTION_INTERVAL = 86400;
protected static final boolean DEFAULT_UNCHECKED_TOMBSTONE_COMPACTION_OPTION = false;
// User-facing option names
protected static final String TOMBSTONE_THRESHOLD_OPTION = "tombstone_threshold";
protected static final String TOMBSTONE_COMPACTION_INTERVAL_OPTION = "tombstone_compaction_interval";
// disable range overlap check when deciding if an SSTable is candidate for tombstone compaction (CASSANDRA-6563)
protected static final String UNCHECKED_TOMBSTONE_COMPACTION_OPTION = "unchecked_tombstone_compaction";
protected static final String COMPACTION_ENABLED = "enabled";
// Immutable copy of the raw options supplied to the constructor.
public Map<String, String> options;
protected final ColumnFamilyStore cfs;
// Parsed option values; fall back to defaults if validation fails.
protected float tombstoneThreshold;
protected long tombstoneCompactionInterval;
protected boolean uncheckedTombstoneCompaction;
protected boolean disableTombstoneCompactions = false;
/**
* pause/resume/getNextBackgroundTask must synchronize. This guarantees that after pause completes,
* no new tasks will be generated; or put another way, pause can't run until in-progress tasks are
* done being created.
*
* This allows runWithCompactionsDisabled to be confident that after pausing, once in-progress
* tasks abort, it's safe to proceed with truncate/cleanup/etc.
*
* See CASSANDRA-3430
*/
protected boolean isActive = false;
protected volatile boolean enabled = true;
/**
* @param cfs the column family this strategy compacts; must not be null.
* @param options user-supplied strategy options; invalid values are logged
*        and replaced by defaults rather than failing construction.
*/
protected AbstractCompactionStrategy(ColumnFamilyStore cfs, Map<String, String> options)
{
assert cfs != null;
this.cfs = cfs;
this.options = ImmutableMap.copyOf(options);
/* checks must be repeated here, as user supplied strategies might not call validateOptions directly */
try
{
validateOptions(options);
String optionValue = options.get(TOMBSTONE_THRESHOLD_OPTION);
tombstoneThreshold = optionValue == null ? DEFAULT_TOMBSTONE_THRESHOLD : Float.parseFloat(optionValue);
optionValue = options.get(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
tombstoneCompactionInterval = optionValue == null ? DEFAULT_TOMBSTONE_COMPACTION_INTERVAL : Long.parseLong(optionValue);
optionValue = options.get(UNCHECKED_TOMBSTONE_COMPACTION_OPTION);
uncheckedTombstoneCompaction = optionValue == null ? DEFAULT_UNCHECKED_TOMBSTONE_COMPACTION_OPTION : Boolean.parseBoolean(optionValue);
// honor the 'enabled' option at construction time
if (!shouldBeEnabled())
this.disable();
}
catch (ConfigurationException e)
{
logger.warn("Error setting compaction strategy options ({}), defaults will be used", e.getMessage());
tombstoneThreshold = DEFAULT_TOMBSTONE_THRESHOLD;
tombstoneCompactionInterval = DEFAULT_TOMBSTONE_COMPACTION_INTERVAL;
uncheckedTombstoneCompaction = DEFAULT_UNCHECKED_TOMBSTONE_COMPACTION_OPTION;
}
}
/**
* For internal, temporary suspension of background compactions so that we can do exceptional
* things like truncate or major compaction
*/
public synchronized void pause()
{
isActive = false;
}
/**
* Resume background compactions after a prior call to {@link #pause()}.
*/
public synchronized void resume()
{
isActive = true;
}
/**
* Performs any extra initialization required
*/
public void startup()
{
isActive = true;
}
/**
* Releases any resources if this strategy is shutdown (when the CFS is reloaded after a schema change).
*/
public void shutdown()
{
isActive = false;
}
// ---- strategy-specific task generation (implemented by subclasses) ----
/**
* @param gcBefore throw away tombstones older than this
*
* @return the next background/minor compaction task to run; null if nothing to do.
*
* Is responsible for marking its sstables as compaction-pending.
*/
public abstract AbstractCompactionTask getNextBackgroundTask(final int gcBefore);
/**
* @param gcBefore throw away tombstones older than this
*
* @return a compaction task that should be run to compact this columnfamilystore
* as much as possible. Null if nothing to do.
*
* Is responsible for marking its sstables as compaction-pending.
*/
public abstract Collection<AbstractCompactionTask> getMaximalTask(final int gcBefore, boolean splitOutput);
/**
* @param sstables SSTables to compact. Must be marked as compacting.
* @param gcBefore throw away tombstones older than this
*
* @return a compaction task corresponding to the requested sstables.
* Will not be null. (Will throw if user requests an invalid compaction.)
*
* Is responsible for marking its sstables as compaction-pending.
*/
public abstract AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, final int gcBefore);
/**
* Build a compaction task over the sstables in the given transaction.
*
* NOTE(review): maxSSTableBytes is unused in this default implementation;
* presumably subclasses override this method to honor it — confirm intended.
*/
public AbstractCompactionTask getCompactionTask(LifecycleTransaction txn, final int gcBefore, long maxSSTableBytes)
{
return new CompactionTask(cfs, txn, gcBefore, false);
}
/**
* @return the number of background tasks estimated to still be needed for this columnfamilystore
*/
public abstract int getEstimatedRemainingTasks();
/**
* @return size in bytes of the largest sstables for this strategy
*/
public abstract long getMaxSSTableBytes();
/** @return true only when compaction is enabled AND the strategy has been started (not paused/shutdown). */
public boolean isEnabled()
{
return this.enabled && this.isActive;
}
/** Re-enable compactions disabled via disable() or the 'enabled' option. */
public void enable()
{
this.enabled = true;
}
/** Disable compactions until enable() is called. */
public void disable()
{
this.enabled = false;
}
/**
* @return whether or not MeteredFlusher should be able to trigger memtable flushes for this CF.
*/
public boolean isAffectedByMeteredFlusher()
{
return true;
}
/**
* If not affected by MeteredFlusher (and handling flushing on its own), override to tell MF how much
* space to reserve for this CF, i.e., how much space to subtract from `memtable_total_space_in_mb` when deciding
* if other memtables should be flushed or not.
*/
public long getMemtableReservedSize()
{
return 0;
}
/**
* Handle a flushed memtable.
*
* @param memtable the flushed memtable
* @param sstable the written sstable. can be null if the memtable was clean.
*/
public void replaceFlushed(Memtable memtable, SSTableReader sstable)
{
cfs.getTracker().replaceFlushed(memtable, sstable);
// only schedule background compaction if the flush actually produced an sstable
if (sstable != null)
CompactionManager.instance.submitBackground(cfs);
}
/**
* @return a subset of the suggested sstables that are relevant for read requests.
* Default implementation performs no filtering.
*/
public List<SSTableReader> filterSSTablesForReads(List<SSTableReader> sstables)
{
return sstables;
}
/**
* Filters SSTables that are to be blacklisted from the given collection
*
* @param originalCandidates The collection to check for blacklisted SSTables
* @return list of the SSTables with blacklisted ones filtered out
*/
public static Iterable<SSTableReader> filterSuspectSSTables(Iterable<SSTableReader> originalCandidates)
{
// lazy view: keeps only sstables not marked suspect (e.g. after read errors)
return Iterables.filter(originalCandidates, new Predicate<SSTableReader>()
{
public boolean apply(SSTableReader sstable)
{
return !sstable.isMarkedSuspect();
}
});
}
/**
* Returns a list of KeyScanners given sstables and a range on which to scan.
* The default implementation simply grab one SSTableScanner per-sstable, but overriding this method
* allow for a more memory efficient solution if we know the sstable don't overlap (see
* LeveledCompactionStrategy for instance).
*/
@SuppressWarnings("resource")
public ScannerList getScanners(Collection<SSTableReader> sstables, Range<Token> range)
{
RateLimiter limiter = CompactionManager.instance.getRateLimiter();
ArrayList<ISSTableScanner> scanners = new ArrayList<ISSTableScanner>();
try
{
for (SSTableReader sstable : sstables)
scanners.add(sstable.getScanner(range, limiter));
}
catch (Throwable t)
{
// close any scanners opened so far before propagating, preserving
// the original failure and attaching any close() failure to it
try
{
new ScannerList(scanners).close();
}
catch (Throwable t2)
{
t.addSuppressed(t2);
}
throw t;
}
return new ScannerList(scanners);
}
/** @return whether reads should trigger re-writing (defragmenting) of cold rows; default false. */
public boolean shouldDefragment()
{
return false;
}
/** @return the simple class name of this strategy, used for reporting. */
public String getName()
{
return getClass().getSimpleName();
}
/** Atomically swap a set of sstables tracked by this strategy (removals first, then additions). */
public synchronized void replaceSSTables(Collection<SSTableReader> removed, Collection<SSTableReader> added)
{
for (SSTableReader remove : removed)
removeSSTable(remove);
for (SSTableReader add : added)
addSSTable(add);
}
/** Start tracking a new sstable. */
public abstract void addSSTable(SSTableReader added);
/** Stop tracking an sstable. */
public abstract void removeSSTable(SSTableReader sstable);
/**
* Closeable holder for a group of sstable scanners. close() attempts to close
* every scanner, collecting later failures as suppressed exceptions on the first.
*/
public static class ScannerList implements AutoCloseable
{
public final List<ISSTableScanner> scanners;
public ScannerList(List<ISSTableScanner> scanners)
{
this.scanners = scanners;
}
public void close()
{
Throwable t = null;
for (ISSTableScanner scanner : scanners)
{
try
{
scanner.close();
}
catch (Throwable t2)
{
JVMStabilityInspector.inspectThrowable(t2);
// keep the first failure; attach subsequent ones as suppressed
if (t == null)
t = t2;
else
t.addSuppressed(t2);
}
}
if (t != null)
throw Throwables.propagate(t);
}
}
/** Convenience overload: scan the full token range of each sstable (null range). */
public ScannerList getScanners(Collection<SSTableReader> toCompact)
{
return getScanners(toCompact, null);
}
/**
* Check if given sstable is worth dropping tombstones at gcBefore.
* Check is skipped if tombstone_compaction_interval time does not elapse since sstable creation and returns false.
*
* @param sstable SSTable to check
* @param gcBefore time to drop tombstones
* @return true if given sstable's tombstones are expected to be removed
*/
protected boolean worthDroppingTombstones(SSTableReader sstable, int gcBefore)
{
if (disableTombstoneCompactions)
return false;
// since we use estimations to calculate, there is a chance that compaction will not drop tombstones actually.
// if that happens we will end up in infinite compaction loop, so we first check whether enough time has
// elapsed since the SSTable was created.
if (System.currentTimeMillis() < sstable.getCreationTimeFor(Component.DATA) + tombstoneCompactionInterval * 1000)
return false;
double droppableRatio = sstable.getEstimatedDroppableTombstoneRatio(gcBefore);
if (droppableRatio <= tombstoneThreshold)
return false;
//sstable range overlap check is disabled. See CASSANDRA-6563.
if (uncheckedTombstoneCompaction)
return true;
Collection<SSTableReader> overlaps = cfs.getOverlappingSSTables(Collections.singleton(sstable));
if (overlaps.isEmpty())
{
// there is no overlap, tombstones are safely droppable
return true;
}
else if (CompactionController.getFullyExpiredSSTables(cfs, Collections.singleton(sstable), overlaps, gcBefore).size() > 0)
{
return true;
}
else
{
// what percentage of columns do we expect to compact outside of overlap?
if (sstable.getIndexSummarySize() < 2)
{
// we have too few samples to estimate correct percentage
return false;
}
// first, calculate estimated keys that do not overlap
long keys = sstable.estimatedKeys();
Set<Range<Token>> ranges = new HashSet<Range<Token>>(overlaps.size());
for (SSTableReader overlap : overlaps)
ranges.add(new Range<>(overlap.first.getToken(), overlap.last.getToken()));
long remainingKeys = keys - sstable.estimatedKeysForRanges(ranges);
// next, calculate what percentage of columns we have within those keys
long columns = sstable.getEstimatedColumnCount().mean() * remainingKeys;
double remainingColumnsRatio = ((double) columns) / (sstable.getEstimatedColumnCount().count() * sstable.getEstimatedColumnCount().mean());
// return if we still expect to have droppable tombstones in rest of columns
return remainingColumnsRatio * droppableRatio > tombstoneThreshold;
}
}
/**
 * Validate the strategy-level options and return the leftover (unrecognized)
 * options for subclass validation.
 *
 * @param options the raw option map to validate
 * @return a copy of the map with the options handled here removed
 * @throws ConfigurationException if any recognized option has an invalid value
 */
public static Map<String, String> validateOptions(Map<String, String> options) throws ConfigurationException
{
    String threshold = options.get(TOMBSTONE_THRESHOLD_OPTION);
    if (threshold != null)
    {
        try
        {
            float thresholdValue = Float.parseFloat(threshold);
            if (thresholdValue < 0)
            {
                // BUG FIX: message previously said "must be greater than 0",
                // contradicting the check (0 is accepted, negatives are not)
                throw new ConfigurationException(String.format("%s must not be negative, but was %f", TOMBSTONE_THRESHOLD_OPTION, thresholdValue));
            }
        }
        catch (NumberFormatException e)
        {
            // BUG FIX: message previously claimed "int (base10)" for a float option
            throw new ConfigurationException(String.format("%s is not a parsable float for %s", threshold, TOMBSTONE_THRESHOLD_OPTION), e);
        }
    }
    String interval = options.get(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
    if (interval != null)
    {
        try
        {
            long tombstoneCompactionInterval = Long.parseLong(interval);
            if (tombstoneCompactionInterval < 0)
            {
                // BUG FIX: message aligned with the actual check (negatives rejected, 0 allowed)
                throw new ConfigurationException(String.format("%s must not be negative, but was %d", TOMBSTONE_COMPACTION_INTERVAL_OPTION, tombstoneCompactionInterval));
            }
        }
        catch (NumberFormatException e)
        {
            throw new ConfigurationException(String.format("%s is not a parsable int (base10) for %s", interval, TOMBSTONE_COMPACTION_INTERVAL_OPTION), e);
        }
    }
    String unchecked = options.get(UNCHECKED_TOMBSTONE_COMPACTION_OPTION);
    if (unchecked != null)
    {
        if (!unchecked.equalsIgnoreCase("true") && !unchecked.equalsIgnoreCase("false"))
            throw new ConfigurationException(String.format("'%s' should be either 'true' or 'false', not '%s'", UNCHECKED_TOMBSTONE_COMPACTION_OPTION, unchecked));
    }
    String compactionEnabled = options.get(COMPACTION_ENABLED);
    if (compactionEnabled != null)
    {
        if (!compactionEnabled.equalsIgnoreCase("true") && !compactionEnabled.equalsIgnoreCase("false"))
        {
            throw new ConfigurationException(String.format("enabled should either be 'true' or 'false', not %s", compactionEnabled));
        }
    }
    // hand back anything we did not recognize so subclasses can validate it
    Map<String, String> uncheckedOptions = new HashMap<String, String>(options);
    uncheckedOptions.remove(TOMBSTONE_THRESHOLD_OPTION);
    uncheckedOptions.remove(TOMBSTONE_COMPACTION_INTERVAL_OPTION);
    uncheckedOptions.remove(UNCHECKED_TOMBSTONE_COMPACTION_OPTION);
    uncheckedOptions.remove(COMPACTION_ENABLED);
    return uncheckedOptions;
}
public boolean shouldBeEnabled()
{
String optionValue = options.get(COMPACTION_ENABLED);
return optionValue == null || Boolean.parseBoolean(optionValue);
}
/**
* Method for grouping similar SSTables together, This will be used by
* anti-compaction to determine which SSTables should be anitcompacted
* as a group. If a given compaction strategy creates sstables which
* cannot be merged due to some constraint it must override this method.
*/
public Collection<Collection<SSTableReader>> groupSSTablesForAntiCompaction(Collection<SSTableReader> sstablesToGroup)
{
int groupSize = 2;
List<SSTableReader> sortedSSTablesToGroup = new ArrayList<>(sstablesToGroup);
Collections.sort(sortedSSTablesToGroup, SSTableReader.sstableComparator);
Collection<Collection<SSTableReader>> groupedSSTables = new ArrayList<>();
Collection<SSTableReader> currGroup = new ArrayList<>();
for (SSTableReader sstable : sortedSSTablesToGroup)
{
currGroup.add(sstable);
if (currGroup.size() == groupSize)
{
groupedSSTables.add(currGroup);
currGroup = new ArrayList<>();
}
}
if (currGroup.size() != 0)
groupedSSTables.add(currGroup);
return groupedSSTables;
}
}
| |
package it.unibz.inf.ontop.protege.action;
/*
* #%L
* ontop-protege
* %%
* Copyright (C) 2009 - 2013 KRDB Research Centre. Free University of Bozen Bolzano.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.common.collect.ImmutableList;
import com.google.inject.Injector;
import it.unibz.inf.ontop.dbschema.ImmutableMetadata;
import it.unibz.inf.ontop.dbschema.MetadataProvider;
import it.unibz.inf.ontop.dbschema.NamedRelationDefinition;
import it.unibz.inf.ontop.dbschema.RelationID;
import it.unibz.inf.ontop.dbschema.impl.CachingMetadataLookup;
import it.unibz.inf.ontop.dbschema.impl.JDBCMetadataProviderFactory;
import it.unibz.inf.ontop.model.term.functionsymbol.db.BnodeStringTemplateFunctionSymbol;
import it.unibz.inf.ontop.protege.core.*;
import it.unibz.inf.ontop.protege.mapping.DuplicateTriplesMapException;
import it.unibz.inf.ontop.protege.utils.DialogUtils;
import it.unibz.inf.ontop.protege.utils.SwingWorkerWithCompletionPercentageMonitor;
import it.unibz.inf.ontop.spec.mapping.bootstrap.impl.DirectMappingEngine;
import it.unibz.inf.ontop.spec.mapping.pp.SQLPPTriplesMap;
import org.protege.editor.core.ui.action.ProtegeAction;
import org.semanticweb.owlapi.model.OWLDeclarationAxiom;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.atomic.AtomicInteger;
import static it.unibz.inf.ontop.protege.utils.DialogUtils.HTML_TAB;
/**
 * Protege menu action that bootstraps an ontology and mapping from the JDBC
 * data source of the current OBDA model using the Direct Mapping approach:
 * one triples map is generated per database relation, and the corresponding
 * class/property declaration axioms are inserted into the active ontology.
 */
public class BootstrapAction extends ProtegeAction {
    private static final long serialVersionUID = 8671527155950905524L;
    private static final Logger LOGGER = LoggerFactory.getLogger(BootstrapAction.class);
    private static final String DIALOG_TITLE = "Bootstrapping ontology and mapping";
    /**
     * Asks the user for a base IRI (pre-filled from the default prefix), validates
     * it, and launches the bootstrapping work on a background SwingWorker.
     */
    @Override
    public void actionPerformed(ActionEvent evt) {
        OBDAModel obdaModel = OBDAEditorKitSynchronizerPlugin.getCurrentOBDAModel(getEditorKit());
        OntologyPrefixManager prefixManager = obdaModel.getMutablePrefixManager();
        // '#' is replaced so the default namespace can serve as a slash-style base IRI
        String defaultBaseIRI = prefixManager.getDefaultIriPrefix()
                .replace("#", "/");
        JPanel panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.PAGE_AXIS));
        JLabel baseIriLabel = new JLabel("Base IRI - the prefix " +
                "to be used for all generated classes and properties: ");
        baseIriLabel.setAlignmentX(Component.LEFT_ALIGNMENT);
        panel.add(baseIriLabel);
        panel.add(Box.createRigidArea(new Dimension(10, 10)));
        JTextField baseIriField = new JTextField(defaultBaseIRI);
        baseIriField.setAlignmentX(Component.LEFT_ALIGNMENT);
        panel.add(baseIriField);
        panel.add(Box.createRigidArea(new Dimension(20, 20)));
        // abort silently if the user cancels the dialog
        if (JOptionPane.showOptionDialog(getWorkspace(),
                panel,
                DIALOG_TITLE,
                JOptionPane.OK_CANCEL_OPTION,
                JOptionPane.QUESTION_MESSAGE,
                DialogUtils.getOntopIcon(),
                null,
                null) != JOptionPane.OK_OPTION)
            return;
        String baseIri0 = baseIriField.getText().trim();
        // '#' in a base IRI would yield invalid generated IRIs, so reject it up front
        if (baseIri0.contains("#")) {
            DialogUtils.showPrettyMessageDialog(getWorkspace(),
                    "Base IRIs cannot contain '#':\n" +
                    baseIri0 + " is not a valid base IRI.",
                    DIALOG_TITLE);
            return;
        }
        // empty input falls back to the default base IRI
        String baseIri = DirectMappingEngine.fixBaseURI(
                baseIri0.isEmpty() ? defaultBaseIRI : baseIri0);
        prefixManager.generateUniquePrefixForBootstrapper(baseIri);
        // the JDBC metadata extraction is slow, so it runs off the Event Dispatch Thread
        BootstrapWorker worker = new BootstrapWorker(baseIri);
        worker.execute();
    }
    /**
     * Background worker that extracts the database metadata over a JDBC
     * connection, derives one direct-mapping triples map per relation, and
     * reports progress as a completion percentage.
     */
    private class BootstrapWorker extends SwingWorkerWithCompletionPercentageMonitor<ImmutableList<SQLPPTriplesMap>, Void> {
        private final String baseIri;
        private final JDBCMetadataProviderFactory metadataProviderFactory;
        private final DirectMappingEngine directMappingEngine;
        private final OBDAModel obdaModel;
        // next free index for naming the generated triples maps; continues after the existing ones
        private final AtomicInteger currentMappingIndex;
        BootstrapWorker(String baseIri) {
            super(getWorkspace(),
                    "<html><h3>Bootstrapping ontology and mapping:</h3></html>");
            this.baseIri = baseIri;
            obdaModel = OBDAEditorKitSynchronizerPlugin.getCurrentOBDAModel(getEditorKit());
            Injector injector = obdaModel.getOntopConfiguration().getInjector();
            this.metadataProviderFactory = injector.getInstance(JDBCMetadataProviderFactory.class);
            this.directMappingEngine = injector.getInstance(DirectMappingEngine.class);
            this.currentMappingIndex = new AtomicInteger(obdaModel.getTriplesMapManager().size() + 1);
        }
        /**
         * Runs on a background thread: loads all relation definitions (first half of
         * the progress ticks), then generates the triples maps (second half).
         */
        @Override
        protected ImmutableList<SQLPPTriplesMap> doInBackground() throws Exception {
            start("initializing...");
            final ImmutableMetadata metadata;
            // the connection is only needed for metadata extraction; close it before mapping generation
            try (Connection conn = obdaModel.getDataSource().getConnection()) {
                MetadataProvider metadataProvider = metadataProviderFactory.getMetadataProvider(conn);
                ImmutableList<RelationID> relationIds = metadataProvider.getRelationIDs();
                // two passes over the relations: one tick for loading, one for mapping
                setMaxTicks(relationIds.size() * 2);
                startLoop(this::getCompletionPercentage, () -> String.format("%d%% completed.", getCompletionPercentage()));
                CachingMetadataLookup lookup = new CachingMetadataLookup(metadataProvider);
                for (RelationID id : relationIds) {
                    lookup.getRelation(id);
                    tick();
                }
                metadata = lookup.extractImmutableMetadata();
            }
            // shared across relations so bnode templates stay consistent
            Map<NamedRelationDefinition, BnodeStringTemplateFunctionSymbol> bnodeTemplateMap = new HashMap<>();
            ImmutableList.Builder<SQLPPTriplesMap> builder = ImmutableList.builder();
            for (NamedRelationDefinition relation : metadata.getAllRelations()) {
                builder.addAll(directMappingEngine
                        .getMapping(relation, baseIri, bnodeTemplateMap, currentMappingIndex));
                tick();
            }
            endLoop("");
            ImmutableList<SQLPPTriplesMap> triplesMaps = builder.build();
            end();
            return triplesMaps;
        }
        /**
         * Runs on the Event Dispatch Thread after {@link #doInBackground()}:
         * inserts the generated triples maps and declaration axioms into the
         * model and reports the outcome to the user.
         */
        @Override
        public void done() {
            try {
                ImmutableList<SQLPPTriplesMap> triplesMaps = complete();
                // TODO: move back to doInBackground?
                Set<OWLDeclarationAxiom> axioms = obdaModel.insertTriplesMaps(triplesMaps, true);
                DialogUtils.showInfoDialog(getWorkspace(),
                        "<html><h3>Bootstrapping the ontology and mapping is complete.</h3><br>" +
                                HTML_TAB + "<b>" + triplesMaps.size() + "</b> triples maps inserted into the mapping.<br>" +
                                HTML_TAB + "<b>" + axioms.size() + "</b> declaration axioms (re)inserted into the ontology.<br></html>",
                        DIALOG_TITLE);
            }
            catch (DuplicateTriplesMapException e) {
                // bootstrapper generates fresh ids, so duplicates indicate an internal bug
                LOGGER.error("Internal error:", e);
            }
            catch (CancellationException | InterruptedException e) {
                DialogUtils.showCancelledActionDialog(getWorkspace(), DIALOG_TITLE);
            }
            catch (ExecutionException e) {
                DialogUtils.showErrorDialog(getWorkspace(), DIALOG_TITLE, DIALOG_TITLE + " error.", LOGGER, e, obdaModel.getDataSource());
            }
        }
    }
    @Override
    public void initialise() { /* NO-OP */ }
    @Override
    public void dispose() {/* NO-OP */ }
}
| |
package org.sagebionetworks.repo.model.dbo.statistics;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.time.YearMonth;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.IntStream;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.sagebionetworks.repo.model.dbo.statistics.StatisticsMonthlyProjectFilesDAO;
import org.sagebionetworks.repo.model.statistics.FileEvent;
import org.sagebionetworks.repo.model.statistics.monthly.StatisticsMonthlyUtils;
import org.sagebionetworks.repo.model.statistics.project.StatisticsMonthlyProjectFiles;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:jdomodels-test-context.xml" })
/**
 * Integration tests for {@link StatisticsMonthlyProjectFilesDAO}: verifies
 * single lookups, range queries, and batch save/upsert semantics against the
 * database configured by the test Spring context.
 */
public class StatisticsMonthlyProjectFilesDAOTest {
    private static final Integer TEST_FILES_COUNT = 10000;
    private static final Integer TEST_USERS_COUNT = 200;
    @Autowired
    private StatisticsMonthlyProjectFilesDAO dao;
    // Each test starts and ends with an empty table so tests stay independent.
    @BeforeEach
    public void before() {
        dao.clear();
    }
    @AfterEach
    public void after() {
        dao.clear();
    }
    @Test
    public void testGetMonthlyProjectFileStatisticsAbsent() {
        Long projectId = 1L;
        YearMonth month = YearMonth.of(2019, 8);
        // Call under test
        Optional<StatisticsMonthlyProjectFiles> result = dao.getProjectFilesStatistics(projectId, FileEvent.FILE_DOWNLOAD, month);
        assertFalse(result.isPresent());
    }
    @Test
    public void testGetMonthlyProjectFilesStatistics() {
        Long projectId = 1L;
        YearMonth month = YearMonth.of(2019, 8);
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        StatisticsMonthlyProjectFiles stats = new StatisticsMonthlyProjectFiles();
        stats.setProjectId(projectId);
        stats.setMonth(month);
        stats.setEventType(eventType);
        stats.setFilesCount(TEST_FILES_COUNT);
        stats.setUsersCount(TEST_USERS_COUNT);
        dao.save(Collections.singletonList(stats));
        // Call under test
        Optional<StatisticsMonthlyProjectFiles> result = dao.getProjectFilesStatistics(projectId, FileEvent.FILE_DOWNLOAD, month);
        assertTrue(result.isPresent());
        StatisticsMonthlyProjectFiles dto = result.get();
        assertEquals(projectId, dto.getProjectId());
        assertEquals(month, dto.getMonth());
        assertEquals(eventType, dto.getEventType());
        assertEquals(TEST_FILES_COUNT, dto.getFilesCount());
        assertEquals(TEST_USERS_COUNT, dto.getUsersCount());
        // the DAO is expected to stamp the row on save
        assertNotNull(dto.getLastUpdatedOn());
    }
    @Test
    public void testGetMonthlyProjectFilesStatisticsInRangeWithInvalidRange() {
        Long projectId = 1L;
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        // from > to is an invalid range and must be rejected
        YearMonth from = YearMonth.of(2019, 9);
        YearMonth to = YearMonth.of(2019, 8);
        Assertions.assertThrows(IllegalArgumentException.class, () -> {
            // Call under test
            dao.getProjectFilesStatisticsInRange(projectId, eventType, from, to);
        });
    }
    @Test
    public void testGetMonthlyProjectFilesStatisticsInRange() {
        Long projectId = 1L;
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        List<YearMonth> months = StatisticsMonthlyUtils.generatePastMonths(5);
        List<StatisticsMonthlyProjectFiles> batch = new ArrayList<>();
        for (YearMonth month : months) {
            StatisticsMonthlyProjectFiles dto = new StatisticsMonthlyProjectFiles();
            dto.setProjectId(projectId);
            dto.setEventType(eventType);
            dto.setMonth(month);
            dto.setFilesCount(TEST_FILES_COUNT);
            dto.setUsersCount(TEST_USERS_COUNT);
            batch.add(dto);
        }
        dao.save(batch);
        YearMonth from = months.get(0);
        YearMonth to = months.get(months.size() - 1);
        // Call under test
        List<StatisticsMonthlyProjectFiles> result = dao.getProjectFilesStatisticsInRange(projectId, eventType, from, to);
        assertEquals(batch.size(), result.size());
        for (int i=0; i<batch.size(); i++) {
            StatisticsMonthlyProjectFiles expected = batch.get(i);
            StatisticsMonthlyProjectFiles actual = result.get(i);
            // lastUpdatedOn is set by the DAO, so copy it over before the deep equality check
            assertNotNull(actual.getLastUpdatedOn());
            expected.setLastUpdatedOn(actual.getLastUpdatedOn());
            assertEquals(expected, actual);
        }
    }
    @Test
    public void testSaveBatch() {
        int projectsNumber = 200;
        YearMonth month = YearMonth.of(2019, 8);
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        List<StatisticsMonthlyProjectFiles> batch = getBatch(eventType, month, projectsNumber);
        // Call under test
        dao.save(batch);
        Long count = dao.countProjectsInRange(eventType, month, month);
        assertEquals(projectsNumber, count);
        for (int projectId = 0; projectId < projectsNumber; projectId++) {
            assertCounts(Long.valueOf(projectId), eventType, month, TEST_FILES_COUNT, TEST_USERS_COUNT);
        }
    }
    @Test
    public void testSaveBatchOverrideAll() {
        int projectsNumber = 10;
        YearMonth month = YearMonth.of(2019, 8);
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        List<StatisticsMonthlyProjectFiles> batch = getBatch(eventType, month, projectsNumber);
        // First saves a batch
        dao.save(batch);
        // Re-saving the same (project, event, month) keys must overwrite the counts
        int newFilesCount = TEST_FILES_COUNT - 5;
        int newUsersCount = TEST_USERS_COUNT - 5;
        batch = getBatch(eventType, month, projectsNumber, newFilesCount, newUsersCount);
        // Call under test
        dao.save(batch);
        Long count = dao.countProjectsInRange(eventType, month, month);
        assertEquals(projectsNumber, count);
        for (int projectId = 0; projectId < projectsNumber; projectId++) {
            assertCounts(Long.valueOf(projectId), eventType, month, newFilesCount, newUsersCount);
        }
    }
    @Test
    public void testSaveBatchOverridePartial() {
        int projectsNumber = 10;
        YearMonth month = YearMonth.of(2019, 8);
        FileEvent eventType = FileEvent.FILE_DOWNLOAD;
        List<StatisticsMonthlyProjectFiles> batch = getBatch(eventType, month, projectsNumber);
        // First saves a batch
        dao.save(batch);
        // Overwrite only the first overrideProjectsNumber projects; the rest must keep the original counts
        int overrideProjectsNumber = 5;
        int newFilesCount = TEST_FILES_COUNT - 5;
        int newUsersCount = TEST_USERS_COUNT - 5;
        batch = getBatch(eventType, month, overrideProjectsNumber, newFilesCount, newUsersCount);
        // Call under test
        dao.save(batch);
        Long count = dao.countProjectsInRange(eventType, month, month);
        assertEquals(projectsNumber, count);
        for (int projectId = 0; projectId < overrideProjectsNumber; projectId++) {
            assertCounts(Long.valueOf(projectId), eventType, month, newFilesCount, newUsersCount);
        }
        for (int projectId = overrideProjectsNumber; projectId < projectsNumber; projectId++) {
            assertCounts(Long.valueOf(projectId), eventType, month, TEST_FILES_COUNT, TEST_USERS_COUNT);
        }
    }
    /** Asserts that the stored row for the given key has the expected file and user counts. */
    private void assertCounts(Long projectId, FileEvent eventType, YearMonth month, Integer filesCount, Integer usersCount) {
        // projectId is already a Long: no need to unbox and re-box it with Long.valueOf
        Optional<StatisticsMonthlyProjectFiles> result = dao.getProjectFilesStatistics(projectId, eventType, month);
        assertTrue(result.isPresent());
        StatisticsMonthlyProjectFiles stats = result.get();
        assertEquals(filesCount, stats.getFilesCount());
        assertEquals(usersCount, stats.getUsersCount());
    }
    /** Builds a batch with the default test counts; project ids are 0..projectsNumber-1. */
    private List<StatisticsMonthlyProjectFiles> getBatch(FileEvent eventType, YearMonth month, int projectsNumber) {
        return getBatch(eventType, month, projectsNumber, TEST_FILES_COUNT, TEST_USERS_COUNT);
    }
    /** Builds a batch of DTOs, one per project id in 0..projectsNumber-1, with the given counts. */
    private List<StatisticsMonthlyProjectFiles> getBatch(FileEvent eventType, YearMonth month, int projectsNumber, int filesCount,
            int usersCount) {
        List<StatisticsMonthlyProjectFiles> batch = new ArrayList<>();
        IntStream.range(0, projectsNumber).forEach(index -> {
            StatisticsMonthlyProjectFiles dto = new StatisticsMonthlyProjectFiles();
            dto.setProjectId(Long.valueOf(index));
            dto.setEventType(eventType);
            dto.setMonth(month);
            dto.setFilesCount(filesCount);
            dto.setUsersCount(usersCount);
            batch.add(dto);
        });
        return batch;
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator.aggregation;
import com.google.common.base.Joiner;
import com.google.common.primitives.Floats;
import io.airlift.stats.QuantileDigest;
import io.trino.metadata.Metadata;
import io.trino.operator.scalar.AbstractTestFunctions;
import io.trino.spi.Page;
import io.trino.spi.block.Block;
import io.trino.spi.type.SqlVarbinary;
import io.trino.spi.type.StandardTypes;
import io.trino.spi.type.Type;
import io.trino.sql.tree.QualifiedName;
import org.testng.annotations.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.block.BlockAssertions.createBlockOfReals;
import static io.trino.block.BlockAssertions.createDoubleSequenceBlock;
import static io.trino.block.BlockAssertions.createDoublesBlock;
import static io.trino.block.BlockAssertions.createLongSequenceBlock;
import static io.trino.block.BlockAssertions.createLongsBlock;
import static io.trino.block.BlockAssertions.createRLEBlock;
import static io.trino.block.BlockAssertions.createSequenceBlockOfReal;
import static io.trino.metadata.MetadataManager.createTestMetadataManager;
import static io.trino.operator.aggregation.AggregationTestUtils.assertAggregation;
import static io.trino.operator.aggregation.FloatingPointBitsConverterUtil.doubleToSortableLong;
import static io.trino.operator.aggregation.FloatingPointBitsConverterUtil.floatToSortableInt;
import static io.trino.operator.aggregation.TestMergeQuantileDigestFunction.QDIGEST_EQUALITY;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.BooleanType.BOOLEAN;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.TypeSignature.arrayType;
import static io.trino.sql.analyzer.TypeSignatureProvider.fromTypes;
import static java.lang.Double.NaN;
import static java.lang.Integer.max;
import static java.lang.Integer.min;
import static java.lang.String.format;
/**
 * Tests the {@code qdigest_agg} aggregation for BIGINT, DOUBLE and REAL inputs,
 * in each of its three signatures (value; value + weight; value + weight +
 * accuracy), and checks that the quantiles extracted from the resulting digest
 * are within the configured error bound.
 */
public class TestQuantileDigestAggregationFunction
        extends AbstractTestFunctions
{
    private static final Joiner ARRAY_JOINER = Joiner.on(",");
    private static final Metadata METADATA = createTestMetadataManager();
    @Test
    public void testDoublesWithWeights()
    {
        testAggregationDouble(
                createDoublesBlock(1.0, null, 2.0, null, 3.0, null, 4.0, null, 5.0, null),
                createRLEBlock(1, 10),
                0.01, 1.0, 2.0, 3.0, 4.0, 5.0);
        testAggregationDouble(
                createDoublesBlock(null, null, null, null, null),
                createRLEBlock(1, 5),
                NaN);
        testAggregationDouble(
                createDoublesBlock(-1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -10.0),
                createRLEBlock(1, 10),
                0.01, -1.0, -2.0, -3.0, -4.0, -5.0, -6.0, -7.0, -8.0, -9.0, -10.0);
        testAggregationDouble(
                createDoublesBlock(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0),
                createRLEBlock(1, 10),
                0.01, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0);
        testAggregationDouble(
                createDoublesBlock(),
                createRLEBlock(1, 0),
                NaN);
        testAggregationDouble(
                createDoublesBlock(1.0),
                createRLEBlock(1, 1),
                0.01, 1.0);
        testAggregationDouble(
                createDoubleSequenceBlock(-1000, 1000),
                createRLEBlock(1, 2000),
                0.01,
                LongStream.range(-1000, 1000).asDoubleStream().toArray());
    }
    @Test
    public void testRealsWithWeights()
    {
        testAggregationReal(
                createBlockOfReals(1.0F, null, 2.0F, null, 3.0F, null, 4.0F, null, 5.0F, null),
                createRLEBlock(1, 10),
                0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F);
        testAggregationReal(
                createBlockOfReals(null, null, null, null, null),
                createRLEBlock(1, 5),
                NaN);
        testAggregationReal(
                createBlockOfReals(-1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F),
                createRLEBlock(1, 10),
                0.01, -1.0F, -2.0F, -3.0F, -4.0F, -5.0F, -6.0F, -7.0F, -8.0F, -9.0F, -10.0F);
        testAggregationReal(
                createBlockOfReals(1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F),
                createRLEBlock(1, 10),
                0.01, 1.0F, 2.0F, 3.0F, 4.0F, 5.0F, 6.0F, 7.0F, 8.0F, 9.0F, 10.0F);
        testAggregationReal(
                createBlockOfReals(),
                createRLEBlock(1, 0),
                NaN);
        testAggregationReal(
                createBlockOfReals(1.0F),
                createRLEBlock(1, 1),
                0.01, 1.0F);
        testAggregationReal(
                createSequenceBlockOfReal(-1000, 1000),
                createRLEBlock(1, 2000),
                0.01,
                // the Float(float) constructor is deprecated since Java 9; cast and autobox instead
                Floats.toArray(LongStream.range(-1000, 1000).mapToObj(i -> (float) i).collect(toImmutableList())));
    }
    @Test
    public void testBigintsWithWeight()
    {
        testAggregationBigint(
                createLongsBlock(1L, null, 2L, null, 3L, null, 4L, null, 5L, null),
                createRLEBlock(1, 10),
                0.01, 1, 2, 3, 4, 5);
        testAggregationBigint(
                createLongsBlock(null, null, null, null, null),
                createRLEBlock(1, 5),
                NaN);
        testAggregationBigint(
                createLongsBlock(-1, -2, -3, -4, -5, -6, -7, -8, -9, -10),
                createRLEBlock(1, 10),
                0.01, -1, -2, -3, -4, -5, -6, -7, -8, -9, -10);
        testAggregationBigint(
                createLongsBlock(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
                createRLEBlock(1, 10),
                0.01, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
        testAggregationBigint(
                createLongsBlock(new int[] {}),
                createRLEBlock(1, 0),
                NaN);
        testAggregationBigint(
                createLongsBlock(1),
                createRLEBlock(1, 1),
                0.01, 1);
        testAggregationBigint(
                createLongSequenceBlock(-1000, 1000),
                createRLEBlock(1, 2000),
                0.01,
                LongStream.range(-1000, 1000).toArray());
    }
    /** Resolves the qdigest_agg implementation for the given argument types. */
    private InternalAggregationFunction getAggregationFunction(Type... types)
    {
        return METADATA.getAggregateFunctionImplementation(METADATA.resolveFunction(QualifiedName.of("qdigest_agg"), fromTypes(types)));
    }
    /** Exercises all three BIGINT signatures (value; value+weight; value+weight+accuracy). */
    private void testAggregationBigint(Block inputBlock, Block weightsBlock, double maxError, long... inputs)
    {
        // Test without weights and accuracy
        testAggregationBigints(
                getAggregationFunction(BIGINT),
                new Page(inputBlock),
                maxError,
                inputs);
        // Test with weights and without accuracy
        testAggregationBigints(
                getAggregationFunction(BIGINT, BIGINT),
                new Page(inputBlock, weightsBlock),
                maxError,
                inputs);
        // Test with weights and accuracy
        testAggregationBigints(
                getAggregationFunction(BIGINT, BIGINT, DOUBLE),
                new Page(inputBlock, weightsBlock, createRLEBlock(maxError, inputBlock.getPositionCount())),
                maxError,
                inputs);
    }
    /** Exercises all three REAL signatures (value; value+weight; value+weight+accuracy). */
    private void testAggregationReal(Block longsBlock, Block weightsBlock, double maxError, float... inputs)
    {
        // Test without weights and accuracy
        testAggregationReal(
                getAggregationFunction(REAL),
                new Page(longsBlock),
                maxError,
                inputs);
        // Test with weights and without accuracy
        testAggregationReal(
                getAggregationFunction(REAL, BIGINT),
                new Page(longsBlock, weightsBlock),
                maxError,
                inputs);
        // Test with weights and accuracy
        testAggregationReal(
                getAggregationFunction(REAL, BIGINT, DOUBLE),
                new Page(longsBlock, weightsBlock, createRLEBlock(maxError, longsBlock.getPositionCount())),
                maxError,
                inputs);
    }
    /** Exercises all three DOUBLE signatures (value; value+weight; value+weight+accuracy). */
    private void testAggregationDouble(Block longsBlock, Block weightsBlock, double maxError, double... inputs)
    {
        // Test without weights and accuracy
        testAggregationDoubles(
                getAggregationFunction(DOUBLE),
                new Page(longsBlock),
                maxError,
                inputs);
        // Test with weights and without accuracy
        testAggregationDoubles(
                getAggregationFunction(DOUBLE, BIGINT),
                new Page(longsBlock, weightsBlock),
                maxError,
                inputs);
        // Test with weights and accuracy
        testAggregationDoubles(
                getAggregationFunction(DOUBLE, BIGINT, DOUBLE),
                new Page(longsBlock, weightsBlock, createRLEBlock(maxError, longsBlock.getPositionCount())),
                maxError,
                inputs);
    }
    /** Asserts the aggregated digest equals the expected one and its quantiles are within error. */
    private void testAggregationBigints(InternalAggregationFunction function, Page page, double maxError, long... inputs)
    {
        // aggregate level
        assertAggregation(function,
                QDIGEST_EQUALITY,
                "test multiple positions",
                page,
                getExpectedValueLongs(maxError, inputs));
        // test scalars
        List<Long> rows = Arrays.stream(inputs).sorted().boxed().collect(Collectors.toList());
        SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page);
        assertPercentileWithinError(StandardTypes.BIGINT, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99);
    }
    /** Asserts the aggregated digest equals the expected one and its quantiles are within error. */
    private void testAggregationDoubles(InternalAggregationFunction function, Page page, double maxError, double... inputs)
    {
        assertAggregation(function,
                QDIGEST_EQUALITY,
                "test multiple positions",
                page,
                getExpectedValueDoubles(maxError, inputs));
        // test scalars
        List<Double> rows = Arrays.stream(inputs).sorted().boxed().collect(Collectors.toList());
        SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page);
        assertPercentileWithinError(StandardTypes.DOUBLE, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99);
    }
    /** Asserts the aggregated digest equals the expected one and its quantiles are within error. */
    private void testAggregationReal(InternalAggregationFunction function, Page page, double maxError, float... inputs)
    {
        assertAggregation(function,
                QDIGEST_EQUALITY,
                "test multiple positions",
                page,
                getExpectedValuesFloats(maxError, inputs));
        // test scalars
        List<Double> rows = Floats.asList(inputs).stream().sorted().map(Float::doubleValue).collect(Collectors.toList());
        SqlVarbinary returned = (SqlVarbinary) AggregationTestUtils.aggregation(function, page);
        assertPercentileWithinError(StandardTypes.REAL, returned, maxError, rows, 0.1, 0.5, 0.9, 0.99);
    }
    /** Builds the serialized reference digest for long inputs; null for an empty input. */
    private Object getExpectedValueLongs(double maxError, long... values)
    {
        if (values.length == 0) {
            return null;
        }
        QuantileDigest qdigest = new QuantileDigest(maxError);
        Arrays.stream(values).forEach(qdigest::add);
        return new SqlVarbinary(qdigest.serialize().getBytes());
    }
    /** Builds the serialized reference digest for double inputs; null for an empty input. */
    private Object getExpectedValueDoubles(double maxError, double... values)
    {
        if (values.length == 0) {
            return null;
        }
        QuantileDigest qdigest = new QuantileDigest(maxError);
        // doubles are stored in the digest in their order-preserving long encoding
        Arrays.stream(values).forEach(value -> qdigest.add(doubleToSortableLong(value)));
        return new SqlVarbinary(qdigest.serialize().getBytes());
    }
    /** Builds the serialized reference digest for float inputs; null for an empty input. */
    private Object getExpectedValuesFloats(double maxError, float... values)
    {
        if (values.length == 0) {
            return null;
        }
        QuantileDigest qdigest = new QuantileDigest(maxError);
        // floats are stored in the digest in their order-preserving int encoding
        Floats.asList(values).forEach(value -> qdigest.add(floatToSortableInt(value)));
        return new SqlVarbinary(qdigest.serialize().getBytes());
    }
    /** Checks each percentile individually and then all of them in one call. */
    private void assertPercentileWithinError(String type, SqlVarbinary binary, double error, List<? extends Number> rows, double... percentiles)
    {
        if (rows.isEmpty()) {
            // Nothing to assert except that the qdigest is empty
            return;
        }
        // Test each quantile individually (value_at_quantile)
        for (double percentile : percentiles) {
            assertPercentileWithinError(type, binary, error, rows, percentile);
        }
        // Test all the quantiles (values_at_quantiles)
        assertPercentilesWithinError(type, binary, error, rows, percentiles);
    }
    /** Checks value_at_quantile against the error-adjusted lower and upper bounds. */
    private void assertPercentileWithinError(String type, SqlVarbinary binary, double error, List<? extends Number> rows, double percentile)
    {
        Number lowerBound = getLowerBound(error, rows, percentile);
        Number upperBound = getUpperBound(error, rows, percentile);
        // Check that the chosen quantile is within the upper and lower bound of the error
        functionAssertions.assertFunction(
                format("value_at_quantile(CAST(X'%s' AS qdigest(%s)), %s) >= %s", binary.toString().replaceAll("\\s+", " "), type, percentile, lowerBound),
                BOOLEAN,
                true);
        functionAssertions.assertFunction(
                format("value_at_quantile(CAST(X'%s' AS qdigest(%s)), %s) <= %s", binary.toString().replaceAll("\\s+", " "), type, percentile, upperBound),
                BOOLEAN,
                true);
    }
    /** Checks values_at_quantiles against the error-adjusted bounds for every percentile at once. */
    private void assertPercentilesWithinError(String type, SqlVarbinary binary, double error, List<? extends Number> rows, double[] percentiles)
    {
        List<Double> boxedPercentiles = Arrays.stream(percentiles).sorted().boxed().collect(toImmutableList());
        List<Number> lowerBounds = boxedPercentiles.stream().map(percentile -> getLowerBound(error, rows, percentile)).collect(toImmutableList());
        List<Number> upperBounds = boxedPercentiles.stream().map(percentile -> getUpperBound(error, rows, percentile)).collect(toImmutableList());
        // Ensure that the lower bound of each item in the distribution is not greater than the chosen quantiles
        functionAssertions.assertFunction(
                format(
                        "zip_with(values_at_quantiles(CAST(X'%s' AS qdigest(%s)), ARRAY[%s]), ARRAY[%s], (value, lowerbound) -> value >= lowerbound)",
                        binary.toString().replaceAll("\\s+", " "),
                        type,
                        ARRAY_JOINER.join(boxedPercentiles),
                        ARRAY_JOINER.join(lowerBounds)),
                METADATA.getType(arrayType(BOOLEAN.getTypeSignature())),
                Collections.nCopies(percentiles.length, true));
        // Ensure that the upper bound of each item in the distribution is not less than the chosen quantiles
        functionAssertions.assertFunction(
                format(
                        "zip_with(values_at_quantiles(CAST(X'%s' AS qdigest(%s)), ARRAY[%s]), ARRAY[%s], (value, upperbound) -> value <= upperbound)",
                        binary.toString().replaceAll("\\s+", " "),
                        type,
                        ARRAY_JOINER.join(boxedPercentiles),
                        ARRAY_JOINER.join(upperBounds)),
                METADATA.getType(arrayType(BOOLEAN.getTypeSignature())),
                Collections.nCopies(percentiles.length, true));
    }
    /** Smallest value the digest may legally return for the percentile, given the error. */
    private Number getLowerBound(double error, List<? extends Number> rows, double percentile)
    {
        int medianIndex = (int) (rows.size() * percentile);
        int marginOfError = (int) (rows.size() * error / 2);
        return rows.get(max(medianIndex - marginOfError, 0));
    }
    /** Largest value the digest may legally return for the percentile, given the error. */
    private Number getUpperBound(double error, List<? extends Number> rows, double percentile)
    {
        int medianIndex = (int) (rows.size() * percentile);
        int marginOfError = (int) (rows.size() * error / 2);
        return rows.get(min(medianIndex + marginOfError, rows.size() - 1));
    }
}
| |
package org.ethereum.crypto;
import org.ethereum.core.Transaction;
import org.ethereum.crypto.ECKey.ECDSASignature;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;
import java.io.IOException;
import java.math.BigInteger;
import java.security.SignatureException;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.junit.Assert.*;
/**
 * Unit tests for {@link ECKey}: key generation, public-key derivation,
 * address computation, signing, signature verification and key recovery.
 * All fixed expectations below are derived from the single reference
 * private key {@code privString}.
 */
public class ECKeyTest {
    private static final Logger log = LoggerFactory.getLogger(ECKeyTest.class);

    // Reference key material: the public keys, address and signature below were
    // all derived from this private key, so tests can pin exact values.
    private final String privString = "3ecb44df2159c26e0f995712d4f39b6f6e499b40749b1cf1246c37f9516cb6a4";
    private final BigInteger privateKey = new BigInteger(Hex.decode(privString));

    private final String pubString = "0497466f2b32bc3bb76d4741ae51cd1d8578b48d3f1e68da206d47321aec267ce78549b514e4453d74ef11b0cd5e4e4c364effddac8b51bcfc8de80682f952896f";
    private final String compressedPubString = "0397466f2b32bc3bb76d4741ae51cd1d8578b48d3f1e68da206d47321aec267ce7";
    private final byte[] pubKey = Hex.decode(pubString);
    private final byte[] compressedPubKey = Hex.decode(compressedPubString);
    private final String address = "8a40bfaa73256b60764c1bf40675a99083efb075";

    // Signing sha3(exampleMessage) with privString yields sigBase64.
    private final String exampleMessage = "This is an example of a signed message.";
    private final String sigBase64 = "HD5AsBr4wuH6UU9tXuSJhUvgfGayfwoY0cKT03sFUjnpQsupHznd/3mCIRfLuNHlRCVGdAyHecdyM8IVZMtc1I8=";

    @Test
    public void testHashCode() {
        // Pinned value: hashCode must stay stable across runs for map usage.
        Assert.assertEquals(1866897155, ECKey.fromPrivate(privateKey).hashCode());
    }

    @Test
    public void testECKey() {
        // A freshly generated key pair must be canonical and fully populated.
        ECKey key = new ECKey();
        assertTrue(key.isPubKeyCanonical());
        assertNotNull(key.getPubKey());
        assertNotNull(key.getPrivKeyBytes());
        log.debug(Hex.toHexString(key.getPrivKeyBytes()) + " :Generated privkey");
        log.debug(Hex.toHexString(key.getPubKey()) + " :Generated pubkey");
    }

    @Test
    public void testFromPrivateKey() {
        ECKey key = ECKey.fromPrivate(privateKey).decompress();
        assertTrue(key.isPubKeyCanonical());
        assertTrue(key.hasPrivKey());
        assertArrayEquals(pubKey, key.getPubKey());
    }

    @Test(expected = IllegalArgumentException.class)
    public void testPrivatePublicKeyBytesNoArg() {
        // The expected-exception annotation is the assertion here: constructing
        // a key from two nulls must be rejected. (A trailing fail() would be
        // unreachable and is deliberately omitted.)
        new ECKey(null, null);
    }

    @Test
    public void testIsPubKeyOnly() {
        ECKey key = ECKey.fromPublicOnly(pubKey);
        assertTrue(key.isPubKeyCanonical());
        assertTrue(key.isPubKeyOnly());
        // expected first, actual second (JUnit convention)
        assertArrayEquals(pubKey, key.getPubKey());
    }

    @Test
    public void testPublicKeyFromPrivate() {
        byte[] pubFromPriv = ECKey.publicKeyFromPrivate(privateKey, false);
        assertArrayEquals(pubKey, pubFromPriv);
    }

    @Test
    public void testPublicKeyFromPrivateCompressed() {
        byte[] pubFromPriv = ECKey.publicKeyFromPrivate(privateKey, true);
        assertArrayEquals(compressedPubKey, pubFromPriv);
    }

    @Test
    public void testGetAddress() {
        ECKey key = ECKey.fromPublicOnly(pubKey);
        assertArrayEquals(Hex.decode(address), key.getAddress());
    }

    @Test
    public void testToString() {
        ECKey key = ECKey.fromPrivate(BigInteger.TEN); // An example private key.
        assertEquals("pub:04a0434d9e47f3c86235477c7b1ae6ae5d3442d49b1943c2b752a68e2a47e247c7893aba425419bc27a3b6c7e693a24c696f794c2ed877a1593cbee53b037368d7", key.toString());
    }

    @Test
    public void testEthereumSign() throws IOException {
        // TODO: Understand why key must be decompressed for this to work
        ECKey key = ECKey.fromPrivate(privateKey).decompress();
        System.out.println("Secret\t: " + Hex.toHexString(key.getPrivKeyBytes()));
        System.out.println("Pubkey\t: " + Hex.toHexString(key.getPubKey()));
        System.out.println("Data\t: " + exampleMessage);
        byte[] messageHash = HashUtil.sha3(exampleMessage.getBytes());
        ECDSASignature signature = key.sign(messageHash);
        String output = signature.toBase64();
        System.out.println("Signtr\t: " + output + " (Base64, length: " + output.length() + ")");
        assertEquals(sigBase64, output);
    }

    @Test
    public void testVerifySignature1() {
        ECKey key = ECKey.fromPublicOnly(pubKey);
        BigInteger r = new BigInteger("28157690258821599598544026901946453245423343069728565040002908283498585537001");
        BigInteger s = new BigInteger("30212485197630673222315826773656074299979444367665131281281249560925428307087");
        ECDSASignature sig = ECDSASignature.fromComponents(r.toByteArray(), s.toByteArray(), (byte) 28);
        // Previously the boolean result was discarded, so the test asserted nothing.
        assertTrue(key.verify(HashUtil.sha3(exampleMessage.getBytes()), sig));
    }

    @Test
    public void testVerifySignature2() {
        BigInteger r = new BigInteger("c52c114d4f5a3ba904a9b3036e5e118fe0dbb987fe3955da20f2cd8f6c21ab9c", 16);
        BigInteger s = new BigInteger("6ba4c2874299a55ad947dbc98a25ee895aabf6b625c26c435e84bfd70edf2f69", 16);
        ECDSASignature sig = ECDSASignature.fromComponents(r.toByteArray(), s.toByteArray(), (byte) 0x1b);
        byte[] rawtx = Hex.decode("f82804881bc16d674ec8000094cd2a3d9f938e13cd947ec05abc7fe734df8dd8268609184e72a0006480");
        try {
            // Recover the signer from the signature and check the derived address.
            ECKey key = ECKey.signatureToKey(HashUtil.sha3(rawtx), sig.toBase64());
            System.out.println("Signature public key\t: " + Hex.toHexString(key.getPubKey()));
            System.out.println("Sender is\t\t: " + Hex.toHexString(key.getAddress()));
            assertEquals("cd2a3d9f938e13cd947ec05abc7fe734df8dd826", Hex.toHexString(key.getAddress()));
            // Previously the boolean result was discarded, so this asserted nothing.
            assertTrue(key.verify(HashUtil.sha3(rawtx), sig));
        } catch (SignatureException e) {
            fail();
        }
    }

    @Test
    public void testVerifySignature3() throws SignatureException {
        byte[] rawtx = Hex.decode("f86e80893635c9adc5dea000008609184e72a00082109f9479b08ad8787060333663d19704909ee7b1903e58801ba0899b92d0c76cbf18df24394996beef19c050baa9823b4a9828cd9b260c97112ea0c9e62eb4cf0a9d95ca35c8830afac567619d6b3ebee841a3c8be61d35acd8049");
        Transaction tx = new Transaction(rawtx);
        ECKey key = ECKey.signatureToKey(HashUtil.sha3(rawtx), tx.getSignature().toBase64());
        System.out.println("Signature public key\t: " + Hex.toHexString(key.getPubKey()));
        System.out.println("Sender is\t\t: " + Hex.toHexString(key.getAddress()));
        // sender: CD2A3D9F938E13CD947EC05ABC7FE734DF8DD826
        // todo: add test assertion when the sign/verify part actually works.
    }

    @Test
    public void testSValue() throws Exception {
        // Check that we never generate an S value that is larger than half the curve order. This avoids a malleability
        // issue that can allow someone to change a transaction [hash] without invalidating the signature.
        final int ITERATIONS = 10;
        ListeningExecutorService executor = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(ITERATIONS));
        List<ECKey.ECDSASignature> sigs;
        try {
            List<ListenableFuture<ECKey.ECDSASignature>> sigFutures = Lists.newArrayList();
            final ECKey key = new ECKey();
            for (byte i = 0; i < ITERATIONS; i++) {
                final byte[] hash = HashUtil.sha3(new byte[]{i});
                sigFutures.add(executor.submit(new Callable<ECDSASignature>() {
                    @Override
                    public ECKey.ECDSASignature call() throws Exception {
                        return key.doSign(hash);
                    }
                }));
            }
            sigs = Futures.allAsList(sigFutures).get();
        } finally {
            // Previously never shut down, leaking the pool's threads after the test.
            executor.shutdown();
        }
        for (ECKey.ECDSASignature signature : sigs) {
            assertTrue(signature.s.compareTo(ECKey.HALF_CURVE_ORDER) <= 0);
        }
        // equals/hashCode contract for signatures built from the same components.
        final ECKey.ECDSASignature duplicate = new ECKey.ECDSASignature(sigs.get(0).r, sigs.get(0).s);
        assertEquals(sigs.get(0), duplicate);
        assertEquals(sigs.get(0).hashCode(), duplicate.hashCode());
    }

    @Test
    public void testSignVerify() {
        // Round trip: a signature produced by a key must verify with that key.
        ECKey key = ECKey.fromPrivate(privateKey);
        String message = "This is an example of a signed message.";
        ECDSASignature output = key.doSign(message.getBytes());
        assertTrue(key.verify(message.getBytes(), output));
    }

    @Test
    public void testIsPubKeyCanonicalCorect() {
        // Test correct prefix 4, right length 65
        byte[] canonicalPubkey1 = new byte[65];
        canonicalPubkey1[0] = 0x04;
        assertTrue(ECKey.isPubKeyCanonical(canonicalPubkey1));
        // Test correct prefix 2, right length 33
        byte[] canonicalPubkey2 = new byte[33];
        canonicalPubkey2[0] = 0x02;
        assertTrue(ECKey.isPubKeyCanonical(canonicalPubkey2));
        // Test correct prefix 3, right length 33
        byte[] canonicalPubkey3 = new byte[33];
        canonicalPubkey3[0] = 0x03;
        assertTrue(ECKey.isPubKeyCanonical(canonicalPubkey3));
    }

    @Test
    public void testIsPubKeyCanonicalWrongLength() {
        // Test correct prefix 4, but wrong length !65
        byte[] nonCanonicalPubkey1 = new byte[64];
        nonCanonicalPubkey1[0] = 0x04;
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey1));
        // Test correct prefix 2, but wrong length !33
        byte[] nonCanonicalPubkey2 = new byte[32];
        nonCanonicalPubkey2[0] = 0x02;
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey2));
        // Test correct prefix 3, but wrong length !33
        byte[] nonCanonicalPubkey3 = new byte[32];
        nonCanonicalPubkey3[0] = 0x03;
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey3));
    }

    @Test
    public void testIsPubKeyCanonicalWrongPrefix() {
        // Test wrong prefix 4, right length 65
        byte[] nonCanonicalPubkey4 = new byte[65];
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey4));
        // Test wrong prefix 2, right length 33
        byte[] nonCanonicalPubkey5 = new byte[33];
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey5));
        // Test wrong prefix 3, right length 33
        byte[] nonCanonicalPubkey6 = new byte[33];
        assertFalse(ECKey.isPubKeyCanonical(nonCanonicalPubkey6));
    }

    @Test
    public void keyRecovery() throws Exception {
        // One of the four possible recovery ids must reproduce the signing key.
        ECKey key = new ECKey();
        String message = "Hello World!";
        byte[] hash = HashUtil.sha256(message.getBytes());
        ECKey.ECDSASignature sig = key.doSign(hash);
        key = ECKey.fromPublicOnly(key.getPubKeyPoint());
        boolean found = false;
        for (int i = 0; i < 4; i++) {
            ECKey key2 = ECKey.recoverFromSignature(i, sig, hash, true);
            checkNotNull(key2);
            if (key.equals(key2)) {
                found = true;
                break;
            }
        }
        assertTrue(found);
    }

    @Test
    public void testSignedMessageToKey() throws SignatureException {
        byte[] messageHash = HashUtil.sha3(exampleMessage.getBytes());
        ECKey key = ECKey.signatureToKey(messageHash, sigBase64);
        assertNotNull(key);
        assertArrayEquals(pubKey, key.getPubKey());
    }

    @Test
    public void testGetPrivKeyBytes() {
        ECKey key = new ECKey();
        assertNotNull(key.getPrivKeyBytes());
        assertEquals(32, key.getPrivKeyBytes().length);
    }

    @Test
    public void testEqualsObject() {
        ECKey key0 = new ECKey();
        ECKey key1 = ECKey.fromPrivate(privateKey);
        ECKey key2 = ECKey.fromPrivate(privateKey);
        assertFalse(key0.equals(key1));
        assertTrue(key1.equals(key1));
        assertTrue(key1.equals(key2));
    }

    @Test
    public void decryptAECSIC() {
        ECKey key = ECKey.fromPrivate(Hex.decode("abb51256c1324a1350598653f46aa3ad693ac3cf5d05f36eba3f495a1f51590f"));
        byte[] payload = key.decryptAES(Hex.decode("84a727bc81fa4b13947dc9728b88fd08"));
        // Previously only printed the payload; at minimum require a decryption result.
        assertNotNull(payload);
        System.out.println(Hex.toHexString(payload));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.qa.harness;
import java.io.IOException;
import java.rmi.ConnectException;
import java.rmi.MarshalledObject;
import java.rmi.RemoteException;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.logging.Level;
import java.util.logging.Logger;
import net.jini.io.MarshalledInstance;
/**
* An admin for a nonactivatable service which passes a request to
* start a service to a NonActivatableGroup. If a global group does
* not exist, or if the group is intended to be private to the service,
* then a <code>NonActivatableGroup</code> will be created.
* <p>
*
* The logger named <code>org.apache.river.qa.harness.service</code> is used
* to log debug information
*
* <table border=1 cellpadding=5>
*
*<tr> <th> Level <th> Description
*
*<tr> <td> FINE <td> parameter values used to start the service
*</table> <p>
*/
public class NonActivatableServiceStarterAdmin extends AbstractServiceAdmin
    implements Admin
{
    /** the service proxy */
    private Object serviceRef;

    /** the groupAdmin for the group in which the service is to run */
    private NonActivatableGroupAdmin groupAdmin;

    /** flag indicating whether the group is private to this service */
    private boolean privateGroup = false;

    /** the admin manager for the test */
    private final AdminManager manager;

    /**
     * Construct a <code>NonActivatableServiceStarterAdmin</code>.
     *
     * @param config the configuration object for the test run
     * @param serviceName the prefix used to build the property
     *                    names needed to acquire service parameters
     * @param index the instance number for this service
     * @param manager the admin manager
     */
    public NonActivatableServiceStarterAdmin(QAConfig config,
                                             String serviceName,
                                             int index,
                                             AdminManager manager)
    {
        super(config, serviceName, index);
        this.groupAdmin = manager.getNonActivatableGroupAdmin();
        this.manager = manager;
    }

    /* inherit javadoc */
    public synchronized Object getProxy() {
        return serviceRef;
    }

    /**
     * Configures and starts an instance of the service described by the
     * <code>serviceName</code> provided in the constructor.
     * <p>
     * The option arguments for the service are generated, containing the name
     * of the service configuration file and overrides for the groups, locators,
     * and membergroups entries to be used by the service. A
     * <code>NonActivatableServiceDescriptor</code> is created using these
     * arguments and other required parameters (i.e. classpath, codebase, etc).
     * <p>
     * After the service is started, the service
     * proxy is prepared by calling the <code>doProxyPreparation</code>
     * method.
     * <p>
     * This admin administratively sets the lookup port, lookup group
     * and locators, and any member groups that were specified and
     * also passed as overrides. This behavior is considered <b>temporary</b>
     * until all tests have been modified to work with override
     * entry names.
     *
     * @throws TestException if any of the mandatory service
     *                       properties cannot be found. The mandatory
     *                       properties are: <code>impl,
     *                       codebase, classpath, policyfile</code>.
     *                       It is also thrown if the activation system is
     *                       not up, if a class server is not up, if the
     *                       class server cannot access the JAR files
     *                       specified by the codebase parameter, if any
     *                       exception is thrown while attempting to start
     *                       the service, or if generation of the
     *                       configuration file fails.
     * @throws RemoteException never. Any <code>RemoteExceptions</code> which
     *                         occur while attempting to start the service
     *                         will be wrapped in a
     *                         {@link org.apache.river.qa.harness.TestException}.
     */
    public synchronized void start() throws RemoteException, TestException {
        try {
            // generate the overrides string array; typed list replaces the
            // previous raw ArrayList and makes the toArray cast unnecessary
            ArrayList<String> optionsList = new ArrayList<String>();
            addServiceConfigurationFileName(optionsList); // must be first
            addServiceGroupsAndLocators(optionsList);
            addServiceMemberGroups(optionsList);
            addServiceUnicastDiscoveryPort(optionsList);
            addServicePersistenceLog(optionsList);
            addServiceExporter(optionsList);
            addRegisteredOverrides(optionsList);
            String[] serviceConfigArgs = optionsList.toArray(new String[0]);
            NonActivatableGroup group = (NonActivatableGroup) getGroup();
            // get all loggable parameters and log them before trying
            // to call group.startService, so the info is available in case
            // something goes wrong
            getServiceCodebase();
            getServicePolicyFile();
            getServiceClasspath();
            getServiceImpl();
            getStarterConfiguration();
            getServicePreparerName();
            logServiceParameters(); // log debug output
            logOverrides(serviceConfigArgs);
            serviceRef = group.startService(getCodebase(),
                                            getPolicyFile(),
                                            getClasspath(),
                                            getImpl(),
                                            serviceConfigArgs,
                                            getStarterConfigurationFileName(), getTransformer());
        } catch (Exception e) {
            throw new TestException("Problem creating service for "
                                    + serviceName, e);
        }
        //XXX temporary work-around for jrmp dgc problem
        //	try {
        //	    serviceRef = new MarshalledInstance(serviceRef).get(false);
        //	} catch (IOException e) {
        //	    throw new TestException("Problem unmarshalling proxy", e);
        //	} catch (ClassNotFoundException e) {
        //	    throw new TestException("Problem unmarshalling proxy", e);
        //	}
        serviceRef = doProxyPreparation(serviceRef);
    }

    /**
     * Administratively destroys the service managed by this admin. Regardless
     * of the success or failure of the attempt to destroy the service, if the
     * group in which this service is running is private to the service, then
     * the group is also destroyed by calling the <code>stop</code> method for
     * the groups admin. Failure to destroy the private group will generate a
     * stack trace but will not affect the return semantics of the call.
     *
     * @throws RemoteException
     *         when a communication failure occurs between the front-end
     *         and the back-end of the service while attempting to destroy
     *         the service.
     */
    public synchronized void stop() throws RemoteException {
        try {
            ServiceDestroyer.destroy(serviceRef);
        } catch (ConnectException e) {
            // the service may already have been killed by the test itself
            logger.log(Level.INFO,
                       "Service Object is gone, presumed killed by test");
        } finally {
            if (privateGroup) {
                logger.log(Level.FINE, "Destroying service-private group");
                try {
                    groupAdmin.stop(); //best effort
                } catch (Exception e) {
                    logger.log(Level.INFO,
                               "Attempt to stop private group failed",
                               e);
                }
            }
        }
    }

    /**
     * Obtain the group in which to run the service. If a non-null
     * groupAdmin was passed to the constructor, then use that group.
     * Otherwise, create a shared <code>NonActivatableGroup</code> if the test
     * property "org.apache.river.qa.harness.shared" is undefined or has the
     * value <code>true.</code> If that property is defined and has the value
     * <code>false,</code> create a group which is private to the service.
     *
     * @return the NonActivatableGroup proxy
     * @throws TestException if the group could not be created
     */
    private NonActivatableGroup getGroup() throws TestException {
        if (groupAdmin == null) {
            if (config.getBooleanConfigVal("org.apache.river.qa.harness.shared",
                                           true))
            {
                logger.log(Level.FINER, "Creating shared group");
                Object group = null;
                try {
                    group = manager.startService("nonActivatableGroup");
                } catch (Exception e) {
                    throw new TestException("Failed to start the shared "
                                            + "nonactivatable group",
                                            e);
                }
                groupAdmin = (NonActivatableGroupAdmin) manager.getAdmin(group);
            } else {
                logger.log(Level.FINER, "Creating private group");
                groupAdmin = new NonActivatableGroupAdmin(config,
                                                          "nonActivatableGroup",
                                                          index,
                                                          getServiceOptions(),
                                                          getServiceProperties());
                try {
                    groupAdmin.start();
                } catch (Exception e) {
                    throw new TestException("Failed to start "
                                            + "nonactivatable group",
                                            e);
                }
                // remember to tear this group down with the service in stop()
                privateGroup = true;
            }
        }
        return (NonActivatableGroup) groupAdmin.getProxy();
    }
}
| |
/*=========================================================================
* Copyright (c) 2002-2014 Pivotal Software, Inc. All Rights Reserved.
* This product is protected by U.S. and international copyright
* and intellectual property laws. Pivotal products are covered by
* more patents listed at http://www.pivotal.io/patents.
*=========================================================================
*/
package com.gemstone.gemfire.pdx;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import com.gemstone.gemfire.DataSerializer;
import com.gemstone.gemfire.ToDataException;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheFactory;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache.client.PoolManager;
import com.gemstone.gemfire.internal.AvailablePortHelper;
import com.gemstone.gemfire.internal.FileUtil;
import com.gemstone.gemfire.internal.HeapDataOutputStream;
import com.gemstone.gemfire.internal.Version;
import com.gemstone.gemfire.internal.cache.GemFireCacheImpl;
import com.gemstone.gemfire.pdx.internal.PeerTypeRegistration;
import com.gemstone.gemfire.test.junit.categories.IntegrationTest;
/**
* @author dsmith
*
*/
@Category(IntegrationTest.class)
public class PdxAttributesJUnitTest {

    // Scratch directory used as the disk-dir for the named disk stores the
    // tests create; removed again in tearDown().
    private File diskDir;

    @Before
    public void setUp() {
        diskDir = new File("PdxAttributesJUnitTest");
        // name the default disk store so its BACKUP* files match the
        // "BACKUPPDXAttributes" filter used in tearDown()
        GemFireCacheImpl.setDefaultDiskStoreName("PDXAttributesDefault");
        diskDir.mkdirs();
    }

    @After
    public void tearDown() throws Exception {
        // close any cache a test left open, then remove all on-disk artifacts
        GemFireCacheImpl instance = GemFireCacheImpl.getInstance();
        if(instance != null) {
            instance.close();
        }
        FileUtil.delete(diskDir);
        File[] defaultStoreFiles = new File(".").listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                return name.startsWith("BACKUPPDXAttributes");
            }
        });
        for(File file: defaultStoreFiles) {
            FileUtil.delete(file);
        }
    }

    /**
     * The PDX type registry region defaults to REPLICATE and switches to
     * PERSISTENT_REPLICATE when setPdxPersistent(true) is configured.
     */
    @Test
    public void testPdxPersistent() throws Exception {
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            //define a type
            defineAType();
            Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
            assertEquals(DataPolicy.REPLICATE, pdxRegion.getAttributes().getDataPolicy());
        }
        // recycle the cache between the two configurations
        tearDown();
        setUp();
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            cf.setPdxPersistent(true);
            Cache cache = cf.create();
            //define a type
            defineAType();
            Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
            assertEquals(DataPolicy.PERSISTENT_REPLICATE, pdxRegion.getAttributes().getDataPolicy());
            cache.close();
        }
    }

    /**
     * A configured PDX disk store name is applied to the type registry
     * region; without one, persistence still defaults sensibly.
     */
    @Test
    public void testPdxDiskStore() throws Exception {
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            cf.setPdxPersistent(true);
            cf.setPdxDiskStore("diskstore1");
            Cache cache = cf.create();
            cache.createDiskStoreFactory().setDiskDirs(new File[] {diskDir}).setMaxOplogSize(1).create("diskstore1");
            //define a type.
            defineAType();
            Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
            assertEquals("diskstore1", pdxRegion.getAttributes().getDiskStoreName());
            cache.close();
        }
        tearDown();
        setUp();
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            cf.setPdxPersistent(true);
            Cache cache = cf.create();
            //define a type
            defineAType();
            Region pdxRegion = cache.getRegion(PeerTypeRegistration.REGION_NAME);
            assertEquals(DataPolicy.PERSISTENT_REPLICATE, pdxRegion.getAttributes().getDataPolicy());
            cache.close();
        }
    }

    /**
     * Overflow (non-persistent) regions may coexist with a non-persistent
     * PDX registry, regardless of whether the type is defined before or
     * after the region is created.
     */
    @Test
    public void testNonPersistentRegistryWithOverflowRegion() throws Exception {
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            cache.createDiskStoreFactory().setDiskDirs(new File[] {diskDir}).setMaxOplogSize(1).create("diskstore1");
            cache.createRegionFactory(RegionShortcut.LOCAL_OVERFLOW).setDiskStoreName("diskstore1").create("region");
            defineAType();
        }
        tearDown();
        setUp();
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            defineAType();
            cache.createDiskStoreFactory().setDiskDirs(new File[] {diskDir}).setMaxOplogSize(1).create("diskstore1");
            cache.createRegionFactory(RegionShortcut.LOCAL_OVERFLOW).setDiskStoreName("diskstore1").create("region");
        }
    }

    /**
     * Mixing a persistent region with a non-persistent PDX registry must be
     * rejected, whichever of the two is set up first.
     */
    @Test
    public void testNonPersistentRegistryWithPersistentRegion() throws Exception {
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            cache.createDiskStoreFactory().setDiskDirs(new File[] {diskDir}).setMaxOplogSize(1).create("diskstore1");
            cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("diskstore1").create("region");
            try {
                defineATypeNoEnum();
                throw new RuntimeException("Should have received an exception");
            } catch(PdxInitializationException expected) {
            }
        }
        tearDown();
        setUp();
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            defineATypeNoEnum();
            cache.createDiskStoreFactory().setDiskDirs(new File[] {diskDir}).setMaxOplogSize(1).create("diskstore1");
            try {
                // NOTE(review): the store above was created as "diskstore1" (lower-case s)
                // but this line references "diskStore1". If store names are case-sensitive
                // this points at a nonexistent store -- confirm the expected
                // PdxInitializationException is really what this path raises.
                cache.createRegionFactory(RegionShortcut.LOCAL_PERSISTENT).setDiskStoreName("diskStore1").create("region");
                throw new RuntimeException("Should have received an exception");
            } catch(PdxInitializationException expected) {
            }
        }
    }

    /**
     * Test that loner VMs lazily determine if they
     * are a client or a peer.
     * @throws Exception
     */
    @Test
    public void testLazyLoner() throws Exception {
        //Test that we can become a peer registry
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            //This should work, because this is a peer.
            defineAType();
        }
        tearDown();
        setUp();
        //Test that we can become a client registry.
        {
            CacheFactory cf = new CacheFactory();
            cf.set("mcast-port", "0");
            Cache cache = cf.create();
            // creating a pool makes this loner act as a client; the port is
            // never connected to, so type definition must fail
            int port = AvailablePortHelper.getRandomAvailableTCPPort();
            PoolManager.createFactory().addServer("localhost", port).create("pool");
            try {
                defineAType();
                throw new RuntimeException("Should have failed, this is a client that can't connect to a server");
            } catch(ToDataException expected) {
                //do nothing.
            }
        }
    }

    // Serializing a SimpleClass instance forces a PDX type (with enum field)
    // to be defined in the registry of the current cache.
    private void defineAType() throws IOException {
        SimpleClass sc = new SimpleClass(1, (byte) 2);
        HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
        DataSerializer.writeObject(sc, out);
    }

    // Same as defineAType but with the enum field left null, so only a plain
    // PDX type registration is triggered.
    private void defineATypeNoEnum() throws IOException {
        SimpleClass sc = new SimpleClass(1, (byte) 2, null);
        HeapDataOutputStream out = new HeapDataOutputStream(Version.CURRENT);
        DataSerializer.writeObject(sc, out);
    }
}
| |
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.core.dialog;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Dialog;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.pentaho.di.ExecutionConfiguration;
import org.pentaho.di.base.AbstractMeta;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.ui.core.PropsUI;
import org.pentaho.di.ui.core.gui.GUIResource;
import org.pentaho.di.ui.core.gui.WindowProperty;
import org.pentaho.di.ui.core.widget.ColumnInfo;
import org.pentaho.di.ui.core.widget.TableView;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.di.ui.util.HelpUtils;
public class ArgumentsDialog extends Dialog {
private static Class<?> PKG = ArgumentsDialog.class; // for i18n purposes, needed by Translator2!!
private TableView wArguments;
private ExecutionConfiguration configuration;
private Shell shell;
private Display display;
private PropsUI props;
/**
* Create the composite.
*
* @param parent
* @param style
*/
public ArgumentsDialog( final Shell parent, ExecutionConfiguration configuration, AbstractMeta abstractMeta ) {
super( parent );
this.configuration = configuration;
display = parent.getDisplay();
shell = new Shell( parent, SWT.DIALOG_TRIM | SWT.MIN | SWT.APPLICATION_MODAL );
props = PropsUI.getInstance();
props.setLook( shell );
shell.setImage( parent.getImage() );
shell.setLayout( new FormLayout() );
shell.setText( BaseMessages.getString( PKG, "ArgumentsDialog.Arguments.Label" ) );
ColumnInfo[] cArguments =
{ new ColumnInfo( BaseMessages.getString( PKG, "ArgumentsDialog.ArgumentsColumn.Argument" ),
ColumnInfo.COLUMN_TYPE_TEXT, false, true, 180 ), // Argument name
new ColumnInfo( BaseMessages.getString( PKG, "ArgumentsDialog.ArgumentsColumn.Value" ),
ColumnInfo.COLUMN_TYPE_TEXT, false, false, 172 ), // Actual value
};
int nrArguments = configuration.getArguments() != null ? configuration.getArguments().size() : 0;
wArguments =
new TableView( abstractMeta, shell, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI, cArguments, nrArguments, false,
null, props, false );
FormData fd_argumentsTable = new FormData();
fd_argumentsTable.top = new FormAttachment( 0, 15 );
fd_argumentsTable.left = new FormAttachment( 0, 15 );
fd_argumentsTable.bottom = new FormAttachment( 0, 221 );
fd_argumentsTable.right = new FormAttachment( 0, 371 );
wArguments.setLayoutData( fd_argumentsTable );
Label separator = new Label( shell, SWT.SEPARATOR | SWT.HORIZONTAL );
FormData fd_separator = new FormData();
fd_separator.top = new FormAttachment( wArguments, 15 );
fd_separator.right = new FormAttachment( wArguments, 0, SWT.RIGHT );
fd_separator.left = new FormAttachment( 0, 15 );
separator.setLayoutData( fd_separator );
Button okButton = new Button( shell, SWT.NONE );
okButton.setText( "OK" );
FormData fd_okButton = new FormData();
if ( Const.isOSX() ) {
fd_okButton.left = new FormAttachment( 0, 215 );
} else {
fd_okButton.left = new FormAttachment( 0, 269 );
}
okButton.setLayoutData( fd_okButton );
okButton.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
ok();
}
} );
Button cancelButton = new Button( shell, SWT.NONE );
fd_okButton.top = new FormAttachment( cancelButton, 0, SWT.TOP );
fd_okButton.right = new FormAttachment( cancelButton, -4 );
cancelButton.setText( "Cancel" );
FormData fd_cancelButton = new FormData();
fd_cancelButton.top = new FormAttachment( separator, 13 );
fd_cancelButton.right = new FormAttachment( wArguments, 0, SWT.RIGHT );
cancelButton.setLayoutData( fd_cancelButton );
cancelButton.addSelectionListener( new SelectionAdapter() {
public void widgetSelected( SelectionEvent e ) {
dispose();
}
} );
Button btnHelp = new Button( shell, SWT.NONE );
btnHelp.setImage( GUIResource.getInstance().getImageHelpWeb() );
btnHelp.setText( BaseMessages.getString( PKG, "System.Button.Help" ) );
btnHelp.setToolTipText( BaseMessages.getString( PKG, "System.Tooltip.Help" ) );
FormData fd_btnHelp = new FormData();
fd_btnHelp.top = new FormAttachment( separator, 13 );
fd_btnHelp.left = new FormAttachment( separator, 0, SWT.LEFT );
btnHelp.setLayoutData( fd_btnHelp );
btnHelp.addSelectionListener( new SelectionAdapter() {
@Override
public void widgetSelected( SelectionEvent evt ) {
String docUrl = Const.getDocUrl( BaseMessages.getString( Spoon.class, "Spoon.ArgumentsDialog.Help" ) );
String docTitle = BaseMessages.getString( PKG, "ArgumentsDialog.docTitle" );
String docHeader = BaseMessages.getString( PKG, "ArgumentsDialog.docHeader" );
HelpUtils.openHelpDialog( parent.getShell(), docTitle, docUrl, docHeader );
}
} );
shell.setSize( 394, 319 );
getArgumentsData();
shell.open();
Rectangle shellBounds = getParent().getBounds();
Point dialogSize = shell.getSize();
shell.setLocation( shellBounds.x + ( shellBounds.width - dialogSize.x ) / 2, shellBounds.y + ( shellBounds.height
- dialogSize.y ) / 2 );
while ( !shell.isDisposed() ) {
if ( !display.readAndDispatch() ) {
display.sleep();
}
}
}
@Override
protected void checkSubclass() {
}
private void getArgumentsData() {
wArguments.clearAll( false );
List<String> argumentNames = new ArrayList<String>( configuration.getArguments().keySet() );
Collections.sort( argumentNames );
for ( int i = 0; i < argumentNames.size(); i++ ) {
String argumentName = argumentNames.get( i );
String argumentValue = configuration.getArguments().get( argumentName );
TableItem tableItem = new TableItem( wArguments.table, SWT.NONE );
tableItem.setText( 1, Const.NVL( argumentName, "" ) );
tableItem.setText( 2, Const.NVL( argumentValue, "" ) );
}
wArguments.removeEmptyRows();
wArguments.setRowNums();
wArguments.optWidth( true );
}
private void getInfoArguments() {
Map<String, String> map = new HashMap<String, String>();
int nrNonEmptyArguments = wArguments.nrNonEmpty();
for ( int i = 0; i < nrNonEmptyArguments; i++ ) {
TableItem tableItem = wArguments.getNonEmpty( i );
String varName = tableItem.getText( 1 );
String varValue = tableItem.getText( 2 );
if ( !Utils.isEmpty( varName ) ) {
map.put( varName, varValue );
}
}
configuration.setArguments( map );
}
  /**
   * Commits the edited argument values into the execution configuration and
   * closes the dialog.
   */
  protected void ok() {
    if ( Const.isOSX() ) {
      // OSX bug workaround — presumably flushes any in-progress table cell
      // edit before the values are read below; see TableView.applyOSXChanges.
      //
      wArguments.applyOSXChanges();
    }
    getInfoArguments();
    dispose();
  }
  /**
   * Persists the current window geometry to the properties store and disposes
   * of the shell.
   */
  private void dispose() {
    props.setScreen( new WindowProperty( shell ) );
    shell.dispose();
  }
}
| |
package org.hl7.fhir.instance.hapi.validation;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.io.*;
import java.util.*;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.StructureDefinition;
import org.hl7.fhir.instance.utils.IResourceValidator.BestPracticeWarningLevel;
import org.hl7.fhir.instance.utils.IResourceValidator.IdStatus;
import org.hl7.fhir.utilities.validation.ValidationMessage;
import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity;
import org.w3c.dom.*;
import org.xml.sax.InputSource;
import com.google.gson.*;
import ca.uhn.fhir.context.*;
import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.validation.IValidationContext;
import ca.uhn.fhir.validation.IValidatorModule;
/**
 * Bridges the HL7 FHIR {@code InstanceValidator} into the HAPI validation
 * framework. Parses the incoming resource (XML or JSON), locates the matching
 * {@link StructureDefinition} profile, and runs instance validation against it.
 */
public class FhirInstanceValidator extends BaseValidatorBridge implements IValidatorModule {

  private static FhirContext ourHl7OrgCtx;

  private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirInstanceValidator.class);

  private BestPracticeWarningLevel myBestPracticeWarningLevel;
  private DocumentBuilderFactory myDocBuilderFactory;
  // Field name preserves an existing (misspelled) public setter; do not rename.
  private StructureDefinition myStructureDefintion;
  private IValidationSupport myValidationSupport;

  /**
   * Constructor
   *
   * Uses {@link DefaultProfileValidationSupport} for {@link IValidationSupport
   * validation support}
   */
  public FhirInstanceValidator() {
    this(new DefaultProfileValidationSupport());
  }

  /**
   * Constructor which uses the given validation support
   *
   * @param theValidationSupport
   *          The validation support
   */
  public FhirInstanceValidator(IValidationSupport theValidationSupport) {
    myDocBuilderFactory = DocumentBuilderFactory.newInstance();
    // Namespace awareness is required so getLocalName() works on the root element.
    myDocBuilderFactory.setNamespaceAware(true);
    myValidationSupport = theValidationSupport;
  }

  /**
   * Returns the local name of the document's root element, which for a FHIR
   * XML payload is the resource type name.
   */
  private String determineResourceName(Document theDocument) {
    // The document element is by definition the root element, so there is no
    // need to scan the child node list manually (the previous scan's result
    // was discarded anyway).
    return theDocument.getDocumentElement().getLocalName();
  }

  /**
   * Returns the "best practice" warning level (default is
   * {@link BestPracticeWarningLevel#Hint}).
   * <p>
   * The FHIR Instance Validator has a number of checks for best practices in
   * terms of FHIR usage. If this setting is set to
   * {@link BestPracticeWarningLevel#Error}, any resource data which does not
   * meet these best practices will be reported at the ERROR level. If this
   * setting is set to {@link BestPracticeWarningLevel#Ignore}, best practice
   * guidelines will be ignored.
   * </p>
   *
   * @see {@link #setBestPracticeWarningLevel(BestPracticeWarningLevel)}
   */
  public BestPracticeWarningLevel getBestPracticeWarningLevel() {
    return myBestPracticeWarningLevel;
  }

  /**
   * Returns the {@link IValidationSupport validation support} in use by this
   * validator. Default is an instance of
   * {@link DefaultProfileValidationSupport} if the no-arguments constructor for
   * this object was used.
   */
  public IValidationSupport getValidationSupport() {
    return myValidationSupport;
  }

  /**
   * Sets the "best practice warning level". When validating, any deviations
   * from best practices will be reported at this level.
   * <p>
   * The FHIR Instance Validator has a number of checks for best practices in
   * terms of FHIR usage. If this setting is set to
   * {@link BestPracticeWarningLevel#Error}, any resource data which does not
   * meet these best practices will be reported at the ERROR level. If this
   * setting is set to {@link BestPracticeWarningLevel#Ignore}, best practice
   * guidelines will be ignored.
   * </p>
   *
   * @param theBestPracticeWarningLevel
   *          The level, must not be <code>null</code>
   */
  public void setBestPracticeWarningLevel(BestPracticeWarningLevel theBestPracticeWarningLevel) {
    Validate.notNull(theBestPracticeWarningLevel);
    myBestPracticeWarningLevel = theBestPracticeWarningLevel;
  }

  /**
   * Sets a fixed {@link StructureDefinition} to validate against. When set,
   * it overrides the profile that would otherwise be looked up from the
   * resource type name. (Method name spelling is preserved for compatibility.)
   */
  public void setStructureDefintion(StructureDefinition theStructureDefintion) {
    myStructureDefintion = theStructureDefintion;
  }

  /**
   * Sets the {@link IValidationSupport validation support} in use by this
   * validator. Default is an instance of
   * {@link DefaultProfileValidationSupport} if the no-arguments constructor for
   * this object was used.
   */
  public void setValidationSupport(IValidationSupport theValidationSupport) {
    myValidationSupport = theValidationSupport;
  }

  /**
   * Validates a serialized resource.
   *
   * @param theCtx      the FHIR context describing the resource's version
   * @param theInput    the raw resource text
   * @param theEncoding whether the input is XML or JSON
   * @return the list of validation messages (never {@code null})
   */
  protected List<ValidationMessage> validate(final FhirContext theCtx, String theInput, EncodingEnum theEncoding) {
    HapiWorkerContext workerContext = new HapiWorkerContext(theCtx, myValidationSupport);
    org.hl7.fhir.instance.validation.InstanceValidator v;
    try {
      v = new org.hl7.fhir.instance.validation.InstanceValidator(workerContext);
    } catch (Exception e) {
      throw new ConfigurationException(e);
    }

    v.setResourceIdRule(IdStatus.OPTIONAL);
    v.setBestPracticeWarningLevel(myBestPracticeWarningLevel);
    v.setAnyExtensionsAllowed(true);
    v.setSuppressLoincSnomedMessages(true);

    List<ValidationMessage> messages = new ArrayList<ValidationMessage>();

    if (theEncoding == EncodingEnum.XML) {
      Document document;
      try {
        DocumentBuilder builder = myDocBuilderFactory.newDocumentBuilder();
        InputSource src = new InputSource(new StringReader(theInput));
        document = builder.parse(src);
      } catch (Exception e2) {
        // Unparseable input is a fatal validation outcome, not an internal error.
        ourLog.error("Failure to parse XML input", e2);
        ValidationMessage m = new ValidationMessage();
        m.setLevel(IssueSeverity.FATAL);
        m.setMessage("Failed to parse input, it does not appear to be valid XML:" + e2.getMessage());
        return Collections.singletonList(m);
      }

      String resourceName = determineResourceName(document);
      StructureDefinition profile = myStructureDefintion != null ? myStructureDefintion : loadProfileOrReturnNull(messages, theCtx, resourceName);
      if (profile != null) {
        try {
          v.validate(messages, document, profile);
        } catch (Exception e) {
          throw new InternalErrorException("Unexpected failure while validating resource", e);
        }
      }
    } else if (theEncoding == EncodingEnum.JSON) {
      Gson gson = new GsonBuilder().create();
      JsonObject json = gson.fromJson(theInput, JsonObject.class);

      String resourceName = json.get("resourceType").getAsString();
      StructureDefinition profile = myStructureDefintion != null ? myStructureDefintion : loadProfileOrReturnNull(messages, theCtx, resourceName);
      if (profile != null) {
        try {
          v.validate(messages, json, profile);
        } catch (Exception e) {
          throw new InternalErrorException("Unexpected failure while validating resource", e);
        }
      }
    } else {
      throw new IllegalArgumentException("Unknown encoding: " + theEncoding);
    }

    // Filter out a known noise message emitted by the underlying validator.
    for (Iterator<ValidationMessage> iter = messages.iterator(); iter.hasNext();) {
      if ("Binding has no source, so can't be checked".equals(iter.next().getMessage())) {
        iter.remove();
      }
    }
    return messages;
  }

  @Override
  protected List<ValidationMessage> validate(IValidationContext<?> theCtx) {
    return validate(theCtx.getFhirContext(), theCtx.getResourceAsString(), theCtx.getResourceAsStringEncoding());
  }

  /**
   * Returns an HL7.org DSTU2 context, reusing the caller's context if it is
   * already the right version, otherwise lazily creating (and caching) one.
   * NOTE(review): the lazy init is not synchronized; a race may create the
   * context twice, which appears benign but is worth confirming.
   */
  static FhirContext getHl7OrgDstu2Ctx(FhirContext theCtx) {
    if (theCtx.getVersion().getVersion() == FhirVersionEnum.DSTU2_HL7ORG) {
      return theCtx;
    }
    FhirContext retVal = ourHl7OrgCtx;
    if (retVal == null) {
      retVal = FhirContext.forDstu2Hl7Org();
      ourHl7OrgCtx = retVal;
    }
    return retVal;
  }

  /**
   * Loads the built-in StructureDefinition profile for the given resource type
   * from the classpath, or returns {@code null} (adding a FATAL message to
   * {@code theMessages} when it is non-null) if it cannot be found or read.
   */
  static StructureDefinition loadProfileOrReturnNull(List<ValidationMessage> theMessages, FhirContext theCtx,
      String theResourceName) {
    if (isBlank(theResourceName)) {
      if (theMessages != null) {
        theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL)
            .setMessage("Could not determine resource type from request. Content appears invalid."));
      }
      return null;
    }

    String profileClasspath = theCtx.getVersion().getPathToSchemaDefinitions().replace("/schema", "/profile");
    String profileCpName = profileClasspath + '/' + theResourceName.toLowerCase() + ".profile.xml";
    String profileText;
    try {
      InputStream inputStream = FhirInstanceValidator.class.getResourceAsStream(profileCpName);
      if (inputStream == null) {
        if (theMessages != null) {
          theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL)
              .setMessage("No profile found for resource type " + theResourceName));
        }
        return null;
      }
      try {
        profileText = IOUtils.toString(inputStream, "UTF-8");
      } finally {
        // Previously leaked; always release the classpath stream.
        IOUtils.closeQuietly(inputStream);
      }
    } catch (IOException e1) {
      if (theMessages != null) {
        theMessages.add(new ValidationMessage().setLevel(IssueSeverity.FATAL)
            .setMessage("No profile found for resource type " + theResourceName));
      }
      return null;
    }
    StructureDefinition profile = getHl7OrgDstu2Ctx(theCtx).newXmlParser().parseResource(StructureDefinition.class,
        profileText);
    return profile;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator.scalar;
import org.testng.annotations.Test;
import static io.prestosql.spi.StandardErrorCode.DIVISION_BY_ZERO;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
/**
 * Tests for SQL conditional and comparison constructs: LIKE (with escape),
 * IS [NOT] DISTINCT FROM, [NOT] BETWEEN, [NOT] IN, and both searched and
 * simple CASE expressions — including null propagation and the result-type
 * coercion rules (INTEGER vs BIGINT vs DOUBLE vs DECIMAL vs VARCHAR).
 */
public class TestConditions
        extends AbstractTestFunctions
{
    /** LIKE / NOT LIKE pattern matching, escape characters, and null operands. */
    @Test
    public void testLike()
    {
        assertFunction("'_monkey_' like 'X_monkeyX_' escape 'X'", BOOLEAN, true);

        assertFunction("'monkey' like 'monkey'", BOOLEAN, true);
        assertFunction("'monkey' like 'mon%'", BOOLEAN, true);
        assertFunction("'monkey' like 'mon_ey'", BOOLEAN, true);
        assertFunction("'monkey' like 'm____y'", BOOLEAN, true);

        assertFunction("'monkey' like 'dain'", BOOLEAN, false);
        assertFunction("'monkey' like 'key'", BOOLEAN, false);

        // Backslash is not an escape character unless declared via ESCAPE.
        assertFunction("'_monkey_' like '\\_monkey\\_'", BOOLEAN, false);
        assertFunction("'_monkey_' like 'X_monkeyX_' escape 'X'", BOOLEAN, true);
        assertFunction("'_monkey_' like '_monkey_' escape ''", BOOLEAN, true);

        assertFunction("'*?.(){}+|^$,\\' like '*?.(){}+|^$,\\' escape ''", BOOLEAN, true);

        // Any null operand makes the whole expression null.
        assertFunction("null like 'monkey'", BOOLEAN, null);
        assertFunction("'monkey' like null", BOOLEAN, null);
        assertFunction("'monkey' like 'monkey' escape null", BOOLEAN, null);

        assertFunction("'_monkey_' not like 'X_monkeyX_' escape 'X'", BOOLEAN, false);

        assertFunction("'monkey' not like 'monkey'", BOOLEAN, false);
        assertFunction("'monkey' not like 'mon%'", BOOLEAN, false);
        assertFunction("'monkey' not like 'mon_ey'", BOOLEAN, false);
        assertFunction("'monkey' not like 'm____y'", BOOLEAN, false);

        assertFunction("'monkey' not like 'dain'", BOOLEAN, true);
        assertFunction("'monkey' not like 'key'", BOOLEAN, true);

        assertFunction("'_monkey_' not like '\\_monkey\\_'", BOOLEAN, true);
        assertFunction("'_monkey_' not like 'X_monkeyX_' escape 'X'", BOOLEAN, false);
        assertFunction("'_monkey_' not like '_monkey_' escape ''", BOOLEAN, false);

        assertFunction("'*?.(){}+|^$,\\' not like '*?.(){}+|^$,\\' escape ''", BOOLEAN, false);

        assertFunction("null not like 'monkey'", BOOLEAN, null);
        assertFunction("'monkey' not like null", BOOLEAN, null);
        assertFunction("'monkey' not like 'monkey' escape null", BOOLEAN, null);

        assertInvalidFunction("'monkey' like 'monkey' escape 'foo'", "Escape string must be a single character");
    }

    /** IS DISTINCT FROM treats null as an ordinary comparable value. */
    @Test
    public void testDistinctFrom()
    {
        assertFunction("NULL IS DISTINCT FROM NULL", BOOLEAN, false);
        assertFunction("NULL IS DISTINCT FROM 1", BOOLEAN, true);
        assertFunction("1 IS DISTINCT FROM NULL", BOOLEAN, true);
        assertFunction("1 IS DISTINCT FROM 1", BOOLEAN, false);
        assertFunction("1 IS DISTINCT FROM 2", BOOLEAN, true);

        assertFunction("NULL IS NOT DISTINCT FROM NULL", BOOLEAN, true);
        assertFunction("NULL IS NOT DISTINCT FROM 1", BOOLEAN, false);
        assertFunction("1 IS NOT DISTINCT FROM NULL", BOOLEAN, false);
        assertFunction("1 IS NOT DISTINCT FROM 1", BOOLEAN, true);
        assertFunction("1 IS NOT DISTINCT FROM 2", BOOLEAN, false);
    }

    /** BETWEEN bounds are inclusive; null operands propagate; mixed int/bigint and varchar bounds. */
    @Test
    public void testBetween()
    {
        assertFunction("3 between 2 and 4", BOOLEAN, true);
        assertFunction("3 between 3 and 3", BOOLEAN, true);
        assertFunction("3 between 2 and 3", BOOLEAN, true);
        assertFunction("3 between 3 and 4", BOOLEAN, true);
        assertFunction("3 between 4 and 2", BOOLEAN, false);
        assertFunction("2 between 3 and 4", BOOLEAN, false);
        assertFunction("5 between 3 and 4", BOOLEAN, false);
        assertFunction("null between 2 and 4", BOOLEAN, null);
        assertFunction("3 between null and 4", BOOLEAN, null);
        assertFunction("3 between 2 and null", BOOLEAN, null);

        assertFunction("3 between 3 and 4000000000", BOOLEAN, true);
        assertFunction("5 between 3 and 4000000000", BOOLEAN, true);
        assertFunction("3 between BIGINT '3' and 4", BOOLEAN, true);
        assertFunction("BIGINT '3' between 3 and 4", BOOLEAN, true);

        assertFunction("'c' between 'b' and 'd'", BOOLEAN, true);
        assertFunction("'c' between 'c' and 'c'", BOOLEAN, true);
        assertFunction("'c' between 'b' and 'c'", BOOLEAN, true);
        assertFunction("'c' between 'c' and 'd'", BOOLEAN, true);
        assertFunction("'c' between 'd' and 'b'", BOOLEAN, false);
        assertFunction("'b' between 'c' and 'd'", BOOLEAN, false);
        assertFunction("'e' between 'c' and 'd'", BOOLEAN, false);
        assertFunction("null between 'b' and 'd'", BOOLEAN, null);
        assertFunction("'c' between null and 'd'", BOOLEAN, null);
        assertFunction("'c' between 'b' and null", BOOLEAN, null);

        assertFunction("3 not between 2 and 4", BOOLEAN, false);
        assertFunction("3 not between 3 and 3", BOOLEAN, false);
        assertFunction("3 not between 2 and 3", BOOLEAN, false);
        assertFunction("3 not between 3 and 4", BOOLEAN, false);
        assertFunction("3 not between 4 and 2", BOOLEAN, true);
        assertFunction("2 not between 3 and 4", BOOLEAN, true);
        assertFunction("5 not between 3 and 4", BOOLEAN, true);
        assertFunction("null not between 2 and 4", BOOLEAN, null);
        assertFunction("3 not between null and 4", BOOLEAN, null);
        assertFunction("3 not between 2 and null", BOOLEAN, null);

        assertFunction("'c' not between 'b' and 'd'", BOOLEAN, false);
        assertFunction("'c' not between 'c' and 'c'", BOOLEAN, false);
        assertFunction("'c' not between 'b' and 'c'", BOOLEAN, false);
        assertFunction("'c' not between 'c' and 'd'", BOOLEAN, false);
        assertFunction("'c' not between 'd' and 'b'", BOOLEAN, true);
        assertFunction("'b' not between 'c' and 'd'", BOOLEAN, true);
        assertFunction("'e' not between 'c' and 'd'", BOOLEAN, true);
        assertFunction("null not between 'b' and 'd'", BOOLEAN, null);
        assertFunction("'c' not between null and 'd'", BOOLEAN, null);
        assertFunction("'c' not between 'b' and null", BOOLEAN, null);
    }

    /** IN list membership, including the null-in-list cases that yield null. */
    @Test
    public void testIn()
    {
        assertFunction("3 in (2, 4, 3, 5)", BOOLEAN, true);
        assertFunction("3 not in (2, 4, 3, 5)", BOOLEAN, false);
        assertFunction("3 in (2, 4, 9, 5)", BOOLEAN, false);
        assertFunction("3 in (2, null, 3, 5)", BOOLEAN, true);

        assertFunction("'foo' in ('bar', 'baz', 'foo', 'blah')", BOOLEAN, true);
        assertFunction("'foo' in ('bar', 'baz', 'buz', 'blah')", BOOLEAN, false);
        assertFunction("'foo' in ('bar', null, 'foo', 'blah')", BOOLEAN, true);

        assertFunction("(null in (2, null, 3, 5)) is null", BOOLEAN, true);
        assertFunction("(3 in (2, null)) is null", BOOLEAN, true);
        assertFunction("(null not in (2, null, 3, 5)) is null", BOOLEAN, true);
        assertFunction("(3 not in (2, null)) is null", BOOLEAN, true);
    }

    /** All IN list items are evaluated, so a later division by zero still fails. */
    @Test
    public void testInDoesNotShortCircuit()
    {
        assertInvalidFunction("3 in (2, 4, 3, 5 / 0)", DIVISION_BY_ZERO);
    }

    /** Searched CASE: branch selection, null conditions, and result-type unification. */
    @Test
    public void testSearchCase()
    {
        assertFunction("case " +
                "when true then 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case " +
                "when true then BIGINT '33' " +
                "end",
                BIGINT,
                33L);

        assertFunction("case " +
                "when false then 1 " +
                "else 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case " +
                "when false then 10000000000 " +
                "else 33 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case " +
                "when false then 1 " +
                "when false then 1 " +
                "when true then 33 " +
                "else 1 " +
                "end",
                INTEGER,
                33);

        assertFunction("case " +
                "when false then BIGINT '1' " +
                "when false then 1 " +
                "when true then 33 " +
                "else 1 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case " +
                "when false then 10000000000 " +
                "when false then 1 " +
                "when true then 33 " +
                "else 1 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case " +
                "when false then 1 " +
                "end",
                INTEGER,
                null);

        assertFunction("case " +
                "when true then null " +
                "else 'foo' " +
                "end",
                createVarcharType(3),
                null);

        // A null WHEN condition is treated as not matching.
        assertFunction("case " +
                "when null then 1 " +
                "when true then 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case " +
                "when null then 10000000000 " +
                "when true then 33 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case " +
                "when false then 1.0E0 " +
                "when true then 33 " +
                "end",
                DOUBLE,
                33.0);

        assertDecimalFunction("case " +
                "when false then DECIMAL '2.2' " +
                "when true then DECIMAL '2.2' " +
                "end",
                decimal("2.2"));

        assertDecimalFunction("case " +
                "when false then DECIMAL '1234567890.0987654321' " +
                "when true then DECIMAL '3.3' " +
                "end",
                decimal("0000000003.3000000000"));

        assertDecimalFunction("case " +
                "when false then 1 " +
                "when true then DECIMAL '2.2' " +
                "end",
                decimal("0000000002.2"));

        assertDecimalFunction("case " +
                "when false then 2.2 " +
                "when true then 2.2 " +
                "end",
                decimal("2.2"));

        assertDecimalFunction("case " +
                "when false then 1234567890.0987654321 " +
                "when true then 3.3 " +
                "end",
                decimal("0000000003.3000000000"));

        assertDecimalFunction("case " +
                "when false then 1 " +
                "when true then 2.2 " +
                "end",
                decimal("0000000002.2"));

        assertFunction("case " +
                "when false then DECIMAL '1.1' " +
                "when true then 33.0E0 " +
                "end",
                DOUBLE,
                33.0);

        assertFunction("case " +
                "when false then 1.1 " +
                "when true then 33.0E0 " +
                "end",
                DOUBLE,
                33.0);
    }

    /** Simple CASE: operand/WHEN-value matching, null operand, and result-type unification. */
    @Test
    public void testSimpleCase()
    {
        assertFunction("case true " +
                "when true then cast(null as varchar) " +
                "else 'foo' " +
                "end",
                VARCHAR,
                null);

        assertFunction("case true " +
                "when true then 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case true " +
                "when true then BIGINT '33' " +
                "end",
                BIGINT,
                33L);

        assertFunction("case true " +
                "when false then 1 " +
                "else 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case true " +
                "when false then 10000000000 " +
                "else 33 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case true " +
                "when false then 1 " +
                "when false then 1 " +
                "when true then 33 " +
                "else 1 " +
                "end",
                INTEGER,
                33);

        assertFunction("case true " +
                "when false then 1 " +
                "end",
                INTEGER,
                null);

        assertFunction("case true " +
                "when true then null " +
                "else 'foo' " +
                "end",
                createVarcharType(3),
                null);

        assertFunction("case true " +
                "when null then 10000000000 " +
                "when true then 33 " +
                "end",
                BIGINT,
                33L);

        assertFunction("case true " +
                "when null then 1 " +
                "when true then 33 " +
                "end",
                INTEGER,
                33);

        // A null case operand matches no WHEN clause, so ELSE is taken.
        assertFunction("case null " +
                "when true then 1 " +
                "else 33 " +
                "end",
                INTEGER,
                33);

        assertFunction("case true " +
                "when false then 1.0E0 " +
                "when true then 33 " +
                "end",
                DOUBLE,
                33.0);

        assertDecimalFunction("case true " +
                "when false then DECIMAL '2.2' " +
                "when true then DECIMAL '2.2' " +
                "end",
                decimal("2.2"));

        assertDecimalFunction("case true " +
                "when false then DECIMAL '1234567890.0987654321' " +
                "when true then DECIMAL '3.3' " +
                "end",
                decimal("0000000003.3000000000"));

        assertDecimalFunction("case true " +
                "when false then 1 " +
                "when true then DECIMAL '2.2' " +
                "end",
                decimal("0000000002.2"));

        assertFunction("case true " +
                "when false then DECIMAL '1.1' " +
                "when true then 33.0E0 " +
                "end",
                DOUBLE,
                33.0);

        assertDecimalFunction("case true " +
                "when false then 2.2 " +
                "when true then 2.2 " +
                "end",
                decimal("2.2"));

        assertDecimalFunction("case true " +
                "when false then 1234567890.0987654321 " +
                "when true then 3.3 " +
                "end",
                decimal("0000000003.3000000000"));

        assertDecimalFunction("case true " +
                "when false then 1 " +
                "when true then 2.2 " +
                "end",
                decimal("0000000002.2"));

        assertFunction("case true " +
                "when false then 1.1 " +
                "when true then 33.0E0 " +
                "end",
                DOUBLE,
                33.0);
    }
}
| |
package com.yahoo.labs.samoa.moa.core;
/*
* #%L
* SAMOA
* %%
* Copyright (C) 2007 University of Waikato, Hamilton, New Zealand
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
/**
* Class for discovering classes via reflection in the java class path.
*
* @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
* @version $Revision: 7 $
*/
/**
 * Class for discovering classes via reflection in the java class path.
 *
 * @author Richard Kirkby (rkirkby@cs.waikato.ac.nz)
 * @version $Revision: 7 $
 */
public class AutoClassDiscovery {

    // Cache of package name -> sorted class names, so each package's classpath
    // entries are only scanned once per JVM.
    protected static final Map<String, String[]> cachedClassNames = new HashMap<String, String[]>();

    /**
     * Returns the (sorted) simple names of all classes found under the given
     * package, scanning both directory and jar entries of the class loader's
     * URL list. Results are cached per package name.
     */
    public static String[] findClassNames(String packageNameToSearch) {
        String[] cached = cachedClassNames.get(packageNameToSearch);
        if (cached == null) {
            HashSet<String> classNames = new HashSet<String>();
            String packageDirName = packageNameToSearch.replace('.',
                    File.separatorChar);
            String packageJarName = packageNameToSearch.length() > 0 ? (packageNameToSearch.replace('.', '/') + "/")
                    : "";

            AutoClassDiscovery adc = new AutoClassDiscovery();
            // NOTE(review): this cast assumes the application class loader is a
            // URLClassLoader, which no longer holds on Java 9+ — confirm the
            // supported runtime before relying on this discovery mechanism.
            URLClassLoader sysLoader = (URLClassLoader) adc.getClass().getClassLoader();
            URL[] clUrls = sysLoader.getURLs();
            for (int i = 0; i < clUrls.length; i++) {
                String part = clUrls[i].toString();
                if (part.startsWith("file:")) {
                    // URI parsing rejects raw spaces, so percent-encode them first.
                    part = part.replace(" ", "%20");
                    try {
                        File temp = new File(new java.net.URI(part));
                        part = temp.getAbsolutePath();
                    } catch (URISyntaxException e) {
                        e.printStackTrace();
                    }
                }

                File dir = new File(part);
                if (dir.isDirectory()) {
                    // Classpath entry is an exploded directory: recurse into it.
                    File root = new File(dir.toString() + File.separatorChar + packageDirName);
                    String[] names = findClassesInDirectoryRecursive(root, "");
                    classNames.addAll(Arrays.asList(names));
                } else {
                    // Classpath entry is (presumably) a jar: scan its entries.
                    JarFile jar = null;
                    try {
                        jar = new JarFile(part);
                        Enumeration<JarEntry> jarEntries = jar.entries();
                        while (jarEntries.hasMoreElements()) {
                            String jarEntry = jarEntries.nextElement().getName();
                            if (jarEntry.startsWith(packageJarName)) {
                                String relativeName = jarEntry.substring(packageJarName.length());
                                if (relativeName.endsWith(".class")) {
                                    relativeName = relativeName.replace('/', '.');
                                    classNames.add(relativeName.substring(0,
                                            relativeName.length() - ".class".length()));
                                }
                            }
                        }
                    } catch (IOException ignored) {
                        // ignore unreadable files
                    } finally {
                        // Previously leaked; always release the jar handle.
                        if (jar != null) {
                            try {
                                jar.close();
                            } catch (IOException ignored) {
                                // best-effort close
                            }
                        }
                    }
                }
            }

            cached = classNames.toArray(new String[classNames.size()]);
            Arrays.sort(cached);
            cachedClassNames.put(packageNameToSearch, cached);
        }
        return cached;
    }

    /**
     * Recursively collects class names (without the {@code .class} suffix)
     * below {@code root}, prefixing each with the accumulated
     * {@code packagePath} (dot-terminated per directory level).
     */
    protected static String[] findClassesInDirectoryRecursive(File root,
            String packagePath) {
        HashSet<String> classNames = new HashSet<String>();
        if (root.isDirectory()) {
            String[] list = root.list();
            for (String string : list) {
                if (string.endsWith(".class")) {
                    classNames.add(packagePath
                            + string.substring(0, string.length()
                                    - ".class".length()));
                } else {
                    File testDir = new File(root.getPath() + File.separatorChar
                            + string);
                    if (testDir.isDirectory()) {
                        String[] names = findClassesInDirectoryRecursive(
                                testDir, packagePath + string + ".");
                        classNames.addAll(Arrays.asList(names));
                    }
                }
            }
        }
        return classNames.toArray(new String[classNames.size()]);
    }

    /**
     * Returns every public, concrete, no-arg-constructible class under the
     * given package that is assignable to {@code typeDesired}.
     */
    public static Class[] findClassesOfType(String packageNameToSearch,
            Class<?> typeDesired) {
        ArrayList<Class<?>> classesFound = new ArrayList<Class<?>>();
        String[] classNames = findClassNames(packageNameToSearch);
        for (String className : classNames) {
            String fullName = packageNameToSearch.length() > 0 ? (packageNameToSearch
                    + "." + className)
                    : className;
            if (isPublicConcreteClassOfType(fullName, typeDesired)) {
                try {
                    classesFound.add(Class.forName(fullName));
                } catch (Exception ignored) {
                    // ignore classes that we cannot instantiate
                }
            }
        }
        return classesFound.toArray(new Class[classesFound.size()]);
    }

    /**
     * Returns true if {@code className} names a loadable class that is public,
     * non-abstract, assignable to {@code typeDesired}, and has a public
     * no-argument constructor.
     */
    public static boolean isPublicConcreteClassOfType(String className,
            Class<?> typeDesired) {
        Class<?> testClass = null;
        try {
            testClass = Class.forName(className);
        } catch (Exception e) {
            return false;
        }
        int classModifiers = testClass.getModifiers();
        return (java.lang.reflect.Modifier.isPublic(classModifiers)
                && !java.lang.reflect.Modifier.isAbstract(classModifiers)
                && typeDesired.isAssignableFrom(testClass) && hasEmptyConstructor(testClass));
    }

    /**
     * Returns true if the type declares a public no-argument constructor.
     */
    public static boolean hasEmptyConstructor(Class<?> type) {
        try {
            type.getConstructor();
            return true;
        } catch (Exception ignored) {
            return false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.startup;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.Principal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import org.apache.catalina.Container;
import org.apache.catalina.Context;
import org.apache.catalina.Engine;
import org.apache.catalina.Globals;
import org.apache.catalina.Host;
import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleEvent;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleListener;
import org.apache.catalina.Realm;
import org.apache.catalina.Server;
import org.apache.catalina.Service;
import org.apache.catalina.Wrapper;
import org.apache.catalina.authenticator.NonLoginAuthenticator;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.core.NamingContextListener;
import org.apache.catalina.core.StandardContext;
import org.apache.catalina.core.StandardEngine;
import org.apache.catalina.core.StandardHost;
import org.apache.catalina.core.StandardServer;
import org.apache.catalina.core.StandardService;
import org.apache.catalina.core.StandardWrapper;
import org.apache.catalina.realm.GenericPrincipal;
import org.apache.catalina.realm.RealmBase;
import org.apache.tomcat.util.descriptor.web.LoginConfig;
// TODO: lazy init for the temp dir - only when a JSP is compiled or
// get temp dir is called we need to create it. This will avoid the
// need for the baseDir
// TODO: allow contexts without a base dir - i.e.
// only programmatic. This would disable the default servlet.
/**
* Minimal tomcat starter for embedding/unit tests.
*
* Tomcat supports multiple styles of configuration and
* startup - the most common and stable is server.xml-based,
* implemented in org.apache.catalina.startup.Bootstrap.
*
* This class is for use in apps that embed tomcat.
* Requirements:
*
* - all tomcat classes and possibly servlets are in the classpath.
* ( for example all is in one big jar, or in eclipse CP, or in any other
* combination )
*
* - we need one temporary directory for work files
*
* - no config file is required. This class provides methods to
* use if you have a webapp with a web.xml file, but it is
* optional - you can use your own servlets.
*
* There are a variety of 'add' methods to configure servlets and webapps. These
* methods, by default, create a simple in-memory security realm and apply it.
* If you need more complex security processing, you can define a subclass of
* this class.
*
* This class provides a set of convenience methods for configuring webapp
* contexts, all overloads of the method <code>addWebapp</code>. These methods
* create a webapp context, configure it, and then add it to a {@link Host}.
* They do not use a global default web.xml; rather, they add a lifecycle
* listener that adds the standard DefaultServlet, JSP processing, and welcome
* files.
*
* In complex cases, you may prefer to use the ordinary Tomcat API to create
* webapp contexts; for example, you might need to install a custom Loader
* before the call to {@link Host#addChild(Container)}. To replicate the basic
* behavior of the <code>addWebapp</code> methods, you may want to call two
* methods of this class: {@link #noDefaultWebXmlPath()} and
* {@link #getDefaultWebXmlListener()}.
*
* {@link #getDefaultWebXmlListener()} returns a {@link LifecycleListener} that
* adds the standard DefaultServlet, JSP processing, and welcome files. If you
* add this listener, you must prevent Tomcat from applying any standard global
* web.xml with ...
*
* {@link #noDefaultWebXmlPath()} returns a dummy pathname to configure to
* prevent {@link ContextConfig} from trying to apply a global web.xml file.
*
* This class provides a main() and few simple CLI arguments,
* see setters for doc. It can be used for simple tests and
* demo.
*
* @see <a href="http://svn.apache.org/repos/asf/tomcat/trunk/test/org/apache/catalina/startup/TestTomcat.java">TestTomcat</a>
* @author Costin Manolache
*/
public class Tomcat {
// Single engine, service, server, connector - few cases need more,
// they can use server.xml
protected Server server;
protected Service service;
protected Engine engine;
protected Connector connector; // for more - customize the classes
// To make it a bit easier to config for the common case
// ( one host, one context ).
protected Host host;
// TODO: it's easy to add support for more hosts - but is it
// really needed ?
// TODO: allow use of in-memory connector
protected int port = 8080;
protected String hostname = "localhost";
protected String basedir;
private final Map<String, String> userPass = new HashMap<>();
private final Map<String, List<String>> userRoles = new HashMap<>();
private final Map<String, Principal> userPrincipals = new HashMap<>();
public Tomcat() {
// NOOP
}
/**
* Tomcat needs a directory for temp files. This should be the
* first method called.
*
* By default, if this method is not called, we use:
* - system properties - catalina.base, catalina.home
* - $PWD/tomcat.$PORT
* (/tmp doesn't seem a good choice for security).
*
* TODO: disable work dir if not needed ( no jsp, etc ).
*/
public void setBaseDir(String basedir) {
this.basedir = basedir;
}
/**
* Set the port for the default connector. Must
* be called before start().
*/
public void setPort(int port) {
this.port = port;
}
/**
 * Sets the hostname of the default host; the default is
 * 'localhost'.
 *
 * @param s the host name used when the default {@link Host} is created
 */
public void setHostname(String s) {
hostname = s;
}
/**
* This is equivalent to adding a web application to Tomcat's webapps
* directory. The equivalent of the default web.xml will be applied to the
* web application and any WEB-INF/web.xml and META-INF/context.xml packaged
* with the application will be processed normally. Normal web fragment and
* {@link javax.servlet.ServletContainerInitializer} processing will be
* applied.
*
* @throws ServletException
*/
public Context addWebapp(String contextPath, String baseDir)
throws ServletException {
return addWebapp(getHost(), contextPath, baseDir);
}
/**
* Add a context - programmatic mode, no default web.xml used. This means
* that there is no JSP support (no JSP servlet), no default servlet and
* no web socket support unless explicitly enabled via the programmatic
* interface. There is also no
* {@link javax.servlet.ServletContainerInitializer} processing and no
* annotation processing. If a
* {@link javax.servlet.ServletContainerInitializer} is added
* programmatically, there will still be no scanning for
* {@link javax.servlet.annotation.HandlesTypes} matches.
*
* API calls equivalent with web.xml:
*
* context-param
* ctx.addParameter("name", "value");
*
*
* error-page
* ErrorPage ep = new ErrorPage();
* ep.setErrorCode(500);
* ep.setLocation("/error.html");
* ctx.addErrorPage(ep);
*
* ctx.addMimeMapping("ext", "type");
*
* Note: If you reload the Context, all your configuration will be lost. If
* you need reload support, consider using a LifecycleListener to provide
* your configuration.
*
* TODO: add the rest
*
* @param contextPath "" for root context.
* @param baseDir base dir for the context, for static files. Must exist,
* relative to the server home
*/
public Context addContext(String contextPath,
String baseDir) {
return addContext(getHost(), contextPath, baseDir);
}
/**
* Equivalent with
* <servlet><servlet-name><servlet-class>.
*
* In general it is better/faster to use the method that takes a
* Servlet as param - this one can be used if the servlet is not
* commonly used, and want to avoid loading all deps.
* ( for example: jsp servlet )
*
* You can customize the returned servlet, ex:
*
* wrapper.addInitParameter("name", "value");
*
* @param contextPath Context to add Servlet to
* @param servletName Servlet name (used in mappings)
* @param servletClass The class to be used for the Servlet
* @return The wrapper for the servlet
*/
public Wrapper addServlet(String contextPath,
String servletName,
String servletClass) {
Container ctx = getHost().findChild(contextPath);
return addServlet((Context) ctx, servletName, servletClass);
}
/**
 * Static version of {@link #addServlet(String, String, String)}.
 *
 * @param ctx Context to add Servlet to
 * @param servletName Servlet name (used in mappings)
 * @param servletClass The class to be used for the Servlet
 * @return The wrapper for the servlet
 */
public static Wrapper addServlet(Context ctx,
        String servletName,
        String servletClass) {
    // The wrapper performs the Class.forName and applies init params when
    // the context starts; nothing is loaded here.
    Wrapper wrapper = ctx.createWrapper();
    wrapper.setName(servletName);
    wrapper.setServletClass(servletClass);
    ctx.addChild(wrapper);
    return wrapper;
}
/**
* Add an existing Servlet to the context with no class.forName or
* initialisation.
* @param contextPath Context to add Servlet to
* @param servletName Servlet name (used in mappings)
* @param servlet The Servlet to add
* @return The wrapper for the servlet
*/
public Wrapper addServlet(String contextPath,
String servletName,
Servlet servlet) {
Container ctx = getHost().findChild(contextPath);
return addServlet((Context) ctx, servletName, servlet);
}
/**
* Static version of {@link #addServlet(String, String, Servlet)}.
* @param ctx Context to add Servlet to
* @param servletName Servlet name (used in mappings)
* @param servlet The Servlet to add
* @return The wrapper for the servlet
*/
public static Wrapper addServlet(Context ctx,
String servletName,
Servlet servlet) {
// will do class for name and set init params
Wrapper sw = new ExistingStandardWrapper(servlet);
sw.setName(servletName);
ctx.addChild(sw);
return sw;
}
/**
* Initialise the server.
*
* @throws LifecycleException
*/
public void init() throws LifecycleException {
getServer();
getConnector();
server.init();
}
/**
* Start the server.
*
* @throws LifecycleException
*/
public void start() throws LifecycleException {
getServer();
getConnector();
server.start();
}
/**
* Stop the server.
*
* @throws LifecycleException
*/
public void stop() throws LifecycleException {
getServer();
server.stop();
}
/**
* Destroy the server. This object cannot be used once this method has been
* called.
*/
public void destroy() throws LifecycleException {
getServer();
server.destroy();
// Could null out objects here
}
/**
* Add a user for the in-memory realm. All created apps use this
* by default, can be replaced using setRealm().
*
*/
public void addUser(String user, String pass) {
userPass.put(user, pass);
}
/**
 * Grants a role to a user registered with the in-memory realm.
 *
 * @see #addUser(String, String)
 */
public void addRole(String user, String role) {
    List<String> roles = userRoles.get(user);
    if (roles != null) {
        roles.add(role);
        return;
    }
    // First role for this user: create and register the role list.
    List<String> created = new ArrayList<>();
    created.add(role);
    userRoles.put(user, created);
}
// ------- Extra customization -------
// You can tune individual tomcat objects, using internal APIs
/**
 * Get the default http connector. You can set more
 * parameters - the port is already initialized.
 *
 * Alternatively, you can construct a Connector and set any params,
 * then call addConnector(Connector)
 *
 * @return A connector object that can be customized
 */
public Connector getConnector() {
// Ensure the Server/Service pair exists before 'service' is referenced below.
getServer();
// Lazily created: reuse the connector once it has been built.
if (connector != null) {
return connector;
}
// This will load Apr connector if available,
// default to nio. I'm having strange problems with apr
// XXX: jfclere weird... Don't add the AprLifecycleListener then.
// and for the use case the speed benefit wouldn't matter.
connector = new Connector("HTTP/1.1");
// connector = new Connector("org.apache.coyote.http11.Http11Protocol");
connector.setPort(port);
service.addConnector( connector );
return connector;
}
public void setConnector(Connector connector) {
this.connector = connector;
}
/**
* Get the service object. Can be used to add more
* connectors and few other global settings.
*/
public Service getService() {
getServer();
return service;
}
/**
* Sets the current host - all future webapps will
* be added to this host. When tomcat starts, the
* host will be the default host.
*
* @param host
*/
public void setHost(Host host) {
this.host = host;
}
/**
 * Returns the default host, lazily creating a {@link StandardHost} named
 * after the configured hostname and attaching it to the engine on first use.
 */
public Host getHost() {
if (host == null) {
host = new StandardHost();
host.setName(hostname);
// getEngine() also ensures the server/service chain exists.
getEngine().addChild( host );
}
return host;
}
/**
* Access to the engine, for further customization.
*/
public Engine getEngine() {
if(engine == null ) {
getServer();
engine = new StandardEngine();
engine.setName( "Tomcat" );
engine.setDefaultHost(hostname);
engine.setRealm(createDefaultRealm());
service.setContainer(engine);
}
return engine;
}
/**
 * Get the server object. You can add listeners and few more
 * customizations. JNDI is disabled by default.
 */
public Server getServer() {
// Lazily build the Server/Service pair on first access.
if (server != null) {
return server;
}
// JNDI naming stays off unless enableNaming() is called afterwards.
System.setProperty("catalina.useNaming", "false");
server = new StandardServer();
initBaseDir();
// NOTE(review): -1 presumably disables the shutdown port - confirm
// against StandardServer semantics.
server.setPort( -1 );
service = new StandardService();
service.setName("Tomcat");
server.addService( service );
return server;
}
/**
 * Adds a programmatic context to the given host, using the context path
 * as the context name.
 *
 * @see #addContext(String, String)
 */
public Context addContext(Host host, String contextPath, String dir) {
return addContext(host, contextPath, contextPath, dir);
}
/**
 * @see #addContext(String, String)
 *
 * @param host host to deploy to, or {@code null} for the default host
 * @param contextPath context path ("" for the root context)
 * @param contextName context name
 * @param dir docBase directory for static files
 */
public Context addContext(Host host, String contextPath, String contextName,
String dir) {
// Quiet the per-context logger before the context produces startup noise.
silence(host, contextPath);
Context ctx = new StandardContext();
ctx.setName(contextName);
ctx.setPath(contextPath);
ctx.setDocBase(dir);
// FixContextListener marks the context as configured on start - required
// because no web.xml-based ContextConfig runs for this context.
ctx.addLifecycleListener(new FixContextListener());
// Fall back to the default host when the caller did not supply one.
if (host == null) {
getHost().addChild(ctx);
} else {
host.addChild(ctx);
}
return ctx;
}
/**
* @see #addWebapp(String, String)
*/
public Context addWebapp(Host host, String url, String path) {
return addWebapp(host, url, url, path);
}
/**
 * @see #addWebapp(String, String)
 *
 * @param host host to deploy to, or {@code null} for the default host
 * @param url context path for the webapp
 * @param name context name
 * @param path docBase (directory or archive) for the webapp
 */
public Context addWebapp(Host host, String url, String name, String path) {
// Quiet the per-context logger before deployment starts.
silence(host, url);
Context ctx = new StandardContext();
ctx.setName(name);
ctx.setPath(url);
ctx.setDocBase(path);
// Provides the programmatic equivalent of the default web.xml (default
// servlet, JSP servlet, welcome files) each time the context starts.
ctx.addLifecycleListener(new DefaultWebXmlListener());
// Use the application's own context.xml (from directory or jar) if present.
ctx.setConfigFile(getWebappConfigFile(path, url));
ContextConfig ctxCfg = new ContextConfig();
ctx.addLifecycleListener(ctxCfg);
// prevent it from looking ( if it finds one - it'll have dup error )
ctxCfg.setDefaultWebXml(noDefaultWebXmlPath());
if (host == null) {
getHost().addChild(ctx);
} else {
host.addChild(ctx);
}
return ctx;
}
/**
* Return a listener that provides the required configuration items for JSP
* processing. From the standard Tomcat global web.xml. Pass this to
* {@link Context#addLifecycleListener(LifecycleListener)} and then pass the
* result of {@link #noDefaultWebXmlPath()} to
* {@link ContextConfig#setDefaultWebXml(String)}.
* @return a listener object that configures default JSP processing.
*/
public LifecycleListener getDefaultWebXmlListener() {
return new DefaultWebXmlListener();
}
/**
* @return a pathname to pass to
* {@link ContextConfig#setDefaultWebXml(String)} when using
* {@link #getDefaultWebXmlListener()}.
*/
public String noDefaultWebXmlPath() {
return Constants.NoDefaultWebXml;
}
// ---------- Helper methods and classes -------------------
/**
* Create an in-memory realm. You can replace it for contexts with a real
* one. The Realm created here will be added to the Engine by default and
* may be replaced at the Engine level or over-ridden (as per normal Tomcat
* behaviour) at the Host or Context level.
*/
protected Realm createDefaultRealm() {
return new RealmBase() {
@Override
protected String getName() {
return "Simple";
}
@Override
protected String getPassword(String username) {
return userPass.get(username);
}
@Override
protected Principal getPrincipal(String username) {
Principal p = userPrincipals.get(username);
if (p == null) {
String pass = userPass.get(username);
if (pass != null) {
p = new GenericPrincipal(username, pass,
userRoles.get(username));
userPrincipals.put(username, p);
}
}
return p;
}
};
}
/**
 * Resolves catalina.base / catalina.home directories for this instance.
 * Resolution order for the base dir: explicit {@link #setBaseDir(String)},
 * the catalina.base system property, the catalina.home system property,
 * then a per-port directory under the current working directory.
 */
protected void initBaseDir() {
    String catalinaHome = System.getProperty(Globals.CATALINA_HOME_PROP);
    if (basedir == null) {
        basedir = System.getProperty(Globals.CATALINA_BASE_PROP);
    }
    if (basedir == null) {
        basedir = catalinaHome;
    }
    if (basedir == null) {
        // Create a temp dir. $PWD/tomcat.$PORT - /tmp is avoided for
        // security reasons (see class javadoc).
        basedir = System.getProperty("user.dir") + "/tomcat." + port;
    }
    File baseFile = createAndCanonicalize(new File(basedir));
    server.setCatalinaBase(baseFile);
    System.setProperty(Globals.CATALINA_BASE_PROP, baseFile.getPath());
    basedir = baseFile.getPath();
    if (catalinaHome == null) {
        // No explicit home: home and base are the same directory.
        server.setCatalinaHome(baseFile);
    } else {
        server.setCatalinaHome(createAndCanonicalize(new File(catalinaHome)));
    }
    System.setProperty(Globals.CATALINA_HOME_PROP,
            server.getCatalinaHome().getPath());
}

/**
 * Creates the directory (and any missing parents) and resolves it to its
 * canonical form, falling back to the absolute path on I/O error.
 */
private static File createAndCanonicalize(File dir) {
    dir.mkdirs();
    try {
        return dir.getCanonicalFile();
    } catch (IOException e) {
        return dir.getAbsoluteFile();
    }
}
static final String[] silences = new String[] {
"org.apache.coyote.http11.Http11Protocol",
"org.apache.catalina.core.StandardService",
"org.apache.catalina.core.StandardEngine",
"org.apache.catalina.startup.ContextConfig",
"org.apache.catalina.core.ApplicationContext",
"org.apache.catalina.core.AprLifecycleListener"
};
/**
 * Controls whether the well-known startup loggers are silenced.
 *
 * @param silent <code>true</code> raises those loggers to WARNING,
 *               suppressing the usual startup information;
 *               <code>false</code> restores the default INFO level.
 */
public void setSilent(boolean silent) {
    Level target = silent ? Level.WARNING : Level.INFO;
    for (String loggerName : silences) {
        Logger.getLogger(loggerName).setLevel(target);
    }
}
private void silence(Host host, String ctx) {
Logger.getLogger(getLoggerName(host, ctx)).setLevel(Level.WARNING);
}
/**
 * Builds the name of the per-context container logger, e.g.
 * "org.apache.catalina.core.ContainerBase.[default].[localhost].[/app]".
 *
 * @param host host owning the context, or {@code null} for the default host
 * @param ctx  context path
 */
private String getLoggerName(Host host, String ctx) {
    // Single StringBuilder pass instead of repeated String concatenation.
    Host effectiveHost = (host == null) ? getHost() : host;
    StringBuilder loggerName =
            new StringBuilder("org.apache.catalina.core.ContainerBase.[default].[");
    loggerName.append(effectiveHost.getName());
    loggerName.append("].[");
    loggerName.append(ctx);
    loggerName.append(']');
    return loggerName.toString();
}
/**
* Enables JNDI naming which is disabled by default. Server must implement
* {@link Lifecycle} in order for the {@link NamingContextListener} to be
* used.
*
*/
public void enableNaming() {
// Make sure getServer() has been called as that is where naming is
// disabled
getServer();
server.addLifecycleListener(new NamingContextListener());
System.setProperty("catalina.useNaming", "true");
String value = "org.apache.naming";
String oldValue =
System.getProperty(javax.naming.Context.URL_PKG_PREFIXES);
if (oldValue != null) {
if (oldValue.contains(value)) {
value = oldValue;
} else {
value = value + ":" + oldValue;
}
}
System.setProperty(javax.naming.Context.URL_PKG_PREFIXES, value);
value = System.getProperty
(javax.naming.Context.INITIAL_CONTEXT_FACTORY);
if (value == null) {
System.setProperty
(javax.naming.Context.INITIAL_CONTEXT_FACTORY,
"org.apache.naming.java.javaURLContextFactory");
}
}
/**
* Provide default configuration for a context. This is the programmatic
* equivalent of the default web.xml.
*
* TODO: in normal Tomcat, if default-web.xml is not found, use this
* method
*
* @param contextPath The context to set the defaults for
*/
public void initWebappDefaults(String contextPath) {
Container ctx = getHost().findChild(contextPath);
initWebappDefaults((Context) ctx);
}
/**
 * Static version of {@link #initWebappDefaults(String)}
 * @param ctx The context to set the defaults for
 */
public static void initWebappDefaults(Context ctx) {
// Default servlet
Wrapper servlet = addServlet(
ctx, "default", "org.apache.catalina.servlets.DefaultServlet");
servlet.setLoadOnStartup(1);
servlet.setOverridable(true);
// JSP servlet (by class name - to avoid loading all deps)
servlet = addServlet(
ctx, "jsp", "org.apache.jasper.servlet.JspServlet");
servlet.addInitParameter("fork", "false");
servlet.setLoadOnStartup(3);
servlet.setOverridable(true);
// Servlet mappings
ctx.addServletMapping("/", "default");
ctx.addServletMapping("*.jsp", "jsp");
ctx.addServletMapping("*.jspx", "jsp");
// Sessions: 30 - presumably minutes, as in web.xml session-config; confirm.
ctx.setSessionTimeout(30);
// MIME mappings: DEFAULT_MIME_MAPPINGS holds alternating extension/type
// pairs, so each iteration consumes two elements.
for (int i = 0; i < DEFAULT_MIME_MAPPINGS.length;) {
ctx.addMimeMapping(DEFAULT_MIME_MAPPINGS[i++],
DEFAULT_MIME_MAPPINGS[i++]);
}
// Welcome files
ctx.addWelcomeFile("index.html");
ctx.addWelcomeFile("index.htm");
ctx.addWelcomeFile("index.jsp");
}
/**
 * Fix startup sequence - required if you don't use web.xml.
 *
 * The start() method in context will set 'configured' to false - and
 * expects a listener to set it back to true.
 */
public static class FixContextListener implements LifecycleListener {
@Override
public void lifecycleEvent(LifecycleEvent event) {
try {
Context context = (Context) event.getLifecycle();
if (event.getType().equals(Lifecycle.CONFIGURE_START_EVENT)) {
// No ContextConfig runs for programmatic contexts, so mark the
// context as successfully configured ourselves.
context.setConfigured(true);
}
// LoginConfig is required to process @ServletSecurity
// annotations
if (context.getLoginConfig() == null) {
context.setLoginConfig(
new LoginConfig("NONE", null, null, null));
context.getPipeline().addValve(new NonLoginAuthenticator());
}
} catch (ClassCastException e) {
// Event source was not a Context; nothing for this listener to do.
return;
}
}
}
/**
* Fix reload - required if reloading and using programmatic configuration.
* When a context is reloaded, any programmatic configuration is lost. This
* listener sets the equivalent of conf/web.xml when the context starts.
*/
public static class DefaultWebXmlListener implements LifecycleListener {
@Override
public void lifecycleEvent(LifecycleEvent event) {
if (Lifecycle.BEFORE_START_EVENT.equals(event.getType())) {
initWebappDefaults((Context) event.getLifecycle());
}
}
}
/**
 * Helper class for wrapping existing servlets. This disables servlet
 * lifecycle and normal reloading, but also reduces overhead and provide
 * more direct control over the servlet.
 */
public static class ExistingStandardWrapper extends StandardWrapper {
// The caller-supplied servlet instance being wrapped.
private final Servlet existing;
@SuppressWarnings("deprecation")
public ExistingStandardWrapper( Servlet existing ) {
this.existing = existing;
// Honour the deprecated SingleThreadModel marker interface: flag the
// wrapper so loadServlet() creates per-request instances.
if (existing instanceof javax.servlet.SingleThreadModel) {
singleThreadModel = true;
instancePool = new Stack<>();
}
this.asyncSupported = hasAsync();
}
/**
 * Returns whether the wrapped servlet supports async, either via the
 * inherited flag or via an @WebServlet(asyncSupported=...) annotation.
 */
public boolean hasAsync() {
if (isAsyncSupported()) return true;
boolean result = false;
Class<?> clazz = existing.getClass();
if (clazz.isAnnotationPresent(WebServlet.class)) {
WebServlet ws = clazz.getAnnotation(WebServlet.class);
result = ws.asyncSupported();
}
return result;
}
@Override
public synchronized Servlet loadServlet() throws ServletException {
if (singleThreadModel) {
// SingleThreadModel: reflectively create and init a fresh instance
// for each call rather than sharing the wrapped one.
Servlet instance;
try {
instance = existing.getClass().newInstance();
} catch (InstantiationException e) {
throw new ServletException(e);
} catch (IllegalAccessException e) {
throw new ServletException(e);
}
instance.init(facade);
return instance;
} else {
// Shared instance: init the wrapped servlet exactly once.
if (!instanceInitialized) {
existing.init(facade);
instanceInitialized = true;
}
return existing;
}
}
// Always report the servlet as available.
@Override
public long getAvailable() {
return 0;
}
@Override
public boolean isUnavailable() {
return false;
}
@Override
public Servlet getServlet() {
return existing;
}
@Override
public String getServletClass() {
return existing.getClass().getName();
}
}
/**
 * Default extension-to-MIME-type pairs applied by
 * {@link #initWebappDefaults(Context)}, laid out as
 * { extension, type, extension, type, ... }.
 * This is needed because we don't use the default web.xml, where this
 * is encoded.
 *
 * Fix: removed a stray trailing space from the "ots" mapping, which
 * produced an invalid Content-Type value.
 *
 * TODO: would a properties resource be better ? Or just parsing
 * /etc/mime.types ?
 */
private static final String[] DEFAULT_MIME_MAPPINGS = {
    "abs", "audio/x-mpeg",
    "ai", "application/postscript",
    "aif", "audio/x-aiff",
    "aifc", "audio/x-aiff",
    "aiff", "audio/x-aiff",
    "aim", "application/x-aim",
    "art", "image/x-jg",
    "asf", "video/x-ms-asf",
    "asx", "video/x-ms-asf",
    "au", "audio/basic",
    "avi", "video/x-msvideo",
    "avx", "video/x-rad-screenplay",
    "bcpio", "application/x-bcpio",
    "bin", "application/octet-stream",
    "bmp", "image/bmp",
    "body", "text/html",
    "cdf", "application/x-cdf",
    "cer", "application/pkix-cert",
    "class", "application/java",
    "cpio", "application/x-cpio",
    "csh", "application/x-csh",
    "css", "text/css",
    "dib", "image/bmp",
    "doc", "application/msword",
    "dtd", "application/xml-dtd",
    "dv", "video/x-dv",
    "dvi", "application/x-dvi",
    "eps", "application/postscript",
    "etx", "text/x-setext",
    "exe", "application/octet-stream",
    "gif", "image/gif",
    "gtar", "application/x-gtar",
    "gz", "application/x-gzip",
    "hdf", "application/x-hdf",
    "hqx", "application/mac-binhex40",
    "htc", "text/x-component",
    "htm", "text/html",
    "html", "text/html",
    "ief", "image/ief",
    "jad", "text/vnd.sun.j2me.app-descriptor",
    "jar", "application/java-archive",
    "java", "text/x-java-source",
    "jnlp", "application/x-java-jnlp-file",
    "jpe", "image/jpeg",
    "jpeg", "image/jpeg",
    "jpg", "image/jpeg",
    "js", "application/javascript",
    "jsf", "text/plain",
    "jspf", "text/plain",
    "kar", "audio/midi",
    "latex", "application/x-latex",
    "m3u", "audio/x-mpegurl",
    "mac", "image/x-macpaint",
    "man", "text/troff",
    "mathml", "application/mathml+xml",
    "me", "text/troff",
    "mid", "audio/midi",
    "midi", "audio/midi",
    "mif", "application/x-mif",
    "mov", "video/quicktime",
    "movie", "video/x-sgi-movie",
    "mp1", "audio/mpeg",
    "mp2", "audio/mpeg",
    "mp3", "audio/mpeg",
    "mp4", "video/mp4",
    "mpa", "audio/mpeg",
    "mpe", "video/mpeg",
    "mpeg", "video/mpeg",
    "mpega", "audio/x-mpeg",
    "mpg", "video/mpeg",
    "mpv2", "video/mpeg2",
    "nc", "application/x-netcdf",
    "oda", "application/oda",
    "odb", "application/vnd.oasis.opendocument.database",
    "odc", "application/vnd.oasis.opendocument.chart",
    "odf", "application/vnd.oasis.opendocument.formula",
    "odg", "application/vnd.oasis.opendocument.graphics",
    "odi", "application/vnd.oasis.opendocument.image",
    "odm", "application/vnd.oasis.opendocument.text-master",
    "odp", "application/vnd.oasis.opendocument.presentation",
    "ods", "application/vnd.oasis.opendocument.spreadsheet",
    "odt", "application/vnd.oasis.opendocument.text",
    "otg", "application/vnd.oasis.opendocument.graphics-template",
    "oth", "application/vnd.oasis.opendocument.text-web",
    "otp", "application/vnd.oasis.opendocument.presentation-template",
    "ots", "application/vnd.oasis.opendocument.spreadsheet-template",
    "ott", "application/vnd.oasis.opendocument.text-template",
    "ogx", "application/ogg",
    "ogv", "video/ogg",
    "oga", "audio/ogg",
    "ogg", "audio/ogg",
    "spx", "audio/ogg",
    "flac", "audio/flac",
    "anx", "application/annodex",
    "axa", "audio/annodex",
    "axv", "video/annodex",
    "xspf", "application/xspf+xml",
    "pbm", "image/x-portable-bitmap",
    "pct", "image/pict",
    "pdf", "application/pdf",
    "pgm", "image/x-portable-graymap",
    "pic", "image/pict",
    "pict", "image/pict",
    "pls", "audio/x-scpls",
    "png", "image/png",
    "pnm", "image/x-portable-anymap",
    "pnt", "image/x-macpaint",
    "ppm", "image/x-portable-pixmap",
    "ppt", "application/vnd.ms-powerpoint",
    "pps", "application/vnd.ms-powerpoint",
    "ps", "application/postscript",
    "psd", "image/vnd.adobe.photoshop",
    "qt", "video/quicktime",
    "qti", "image/x-quicktime",
    "qtif", "image/x-quicktime",
    "ras", "image/x-cmu-raster",
    "rdf", "application/rdf+xml",
    "rgb", "image/x-rgb",
    "rm", "application/vnd.rn-realmedia",
    "roff", "text/troff",
    "rtf", "application/rtf",
    "rtx", "text/richtext",
    "sh", "application/x-sh",
    "shar", "application/x-shar",
    /*"shtml", "text/x-server-parsed-html",*/
    "sit", "application/x-stuffit",
    "snd", "audio/basic",
    "src", "application/x-wais-source",
    "sv4cpio", "application/x-sv4cpio",
    "sv4crc", "application/x-sv4crc",
    "svg", "image/svg+xml",
    "svgz", "image/svg+xml",
    "swf", "application/x-shockwave-flash",
    "t", "text/troff",
    "tar", "application/x-tar",
    "tcl", "application/x-tcl",
    "tex", "application/x-tex",
    "texi", "application/x-texinfo",
    "texinfo", "application/x-texinfo",
    "tif", "image/tiff",
    "tiff", "image/tiff",
    "tr", "text/troff",
    "tsv", "text/tab-separated-values",
    "txt", "text/plain",
    "ulw", "audio/basic",
    "ustar", "application/x-ustar",
    "vxml", "application/voicexml+xml",
    "xbm", "image/x-xbitmap",
    "xht", "application/xhtml+xml",
    "xhtml", "application/xhtml+xml",
    "xls", "application/vnd.ms-excel",
    "xml", "application/xml",
    "xpm", "image/x-xpixmap",
    "xsl", "application/xml",
    "xslt", "application/xslt+xml",
    "xul", "application/vnd.mozilla.xul+xml",
    "xwd", "image/x-xwindowdump",
    "vsd", "application/vnd.visio",
    "wav", "audio/x-wav",
    "wbmp", "image/vnd.wap.wbmp",
    "wml", "text/vnd.wap.wml",
    "wmlc", "application/vnd.wap.wmlc",
    // NOTE(review): "text/vnd.wap.wmlsc" looks like a typo for
    // "text/vnd.wap.wmlscript" (cf. the Tomcat default web.xml) - value
    // left unchanged pending confirmation.
    "wmls", "text/vnd.wap.wmlsc",
    "wmlscriptc", "application/vnd.wap.wmlscriptc",
    "wmv", "video/x-ms-wmv",
    "wrl", "model/vrml",
    "wspolicy", "application/wspolicy+xml",
    "Z", "application/x-compress",
    "z", "application/x-compress",
    "zip", "application/zip"
};
/**
 * Locates the per-application context.xml for the given docBase, which may
 * be either an unpacked directory or a packed archive.
 *
 * @param path docBase of the webapp
 * @param url context path, used only for logger naming on failure
 * @return the context.xml URL, or {@code null} if none was found
 */
protected URL getWebappConfigFile(String path, String url) {
    File docBase = new File(path);
    return docBase.isDirectory()
            ? getWebappConfigFileFromDirectory(docBase, url)
            : getWebappConfigFileFromJar(docBase, url);
}
private URL getWebappConfigFileFromDirectory(File docBase, String url) {
URL result = null;
File webAppContextXml = new File(docBase, Constants.ApplicationContextXml);
if (webAppContextXml.exists()) {
try {
result = webAppContextXml.toURI().toURL();
} catch (MalformedURLException e) {
Logger.getLogger(getLoggerName(getHost(), url)).log(Level.WARNING,
"Unable to determine web application context.xml " + docBase, e);
}
}
return result;
}
/**
 * Looks up the application context.xml entry inside a packed webapp
 * archive and returns a jar: URL pointing at it.
 *
 * @param docBase the webapp archive file
 * @param url context path, used only for logger naming on failure
 * @return the jar: URL of the context.xml entry, or {@code null} if the
 *         archive has no such entry or could not be read
 */
private URL getWebappConfigFileFromJar(File docBase, String url) {
    URL result = null;
    // try-with-resources replaces the manual close-in-finally: the JarFile
    // cannot leak even if getJarEntry throws.
    try (JarFile jar = new JarFile(docBase)) {
        JarEntry entry = jar.getJarEntry(Constants.ApplicationContextXml);
        if (entry != null) {
            result = new URL("jar:" + docBase.toURI().toString() + "!/"
                    + Constants.ApplicationContextXml);
        }
    } catch (IOException e) {
        Logger.getLogger(getLoggerName(getHost(), url)).log(Level.WARNING,
                "Unable to determine web application context.xml " + docBase, e);
    }
    return result;
}
}
| |
package com.imagpay.ui;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import com.imagpay.MessageHandler;
import com.imagpay.Settings;
import com.imagpay.SwipeEvent;
import com.imagpay.SwipeHandler;
import com.imagpay.SwipeListener;
import com.imagpay.iMagPayApp;
public class SettingsActivity extends MyActivity {
private SwipeHandler _handler;
private Settings _settings;
private MessageHandler _msg;
private Handler _ui;
private boolean _testFlag = false;
private String _sn, _des, _bdk, _ksn;
private boolean _plainFlag = false;
private boolean _desFlag = false;
private boolean _dukptFlag = false;
private boolean _snFlag = false;
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
// requestWindowFeature(Window.FEATURE_NO_TITLE);
requestWindowFeature(Window.FEATURE_CUSTOM_TITLE);
setContentView(R.layout.activity_settings);
getWindow().setFeatureInt(Window.FEATURE_CUSTOM_TITLE,
R.layout.title_settings);
getWindow().setSoftInputMode(
WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
_handler = new SwipeHandler(this);
_settings = new Settings(_handler);
_msg = new MessageHandler((TextView) findViewById(R.id.status));
_ui = new Handler(Looper.myLooper());
_handler.addSwipeListener(new SwipeListener() {
@Override
public void onReadData(SwipeEvent event) {
}
@Override
public void onParseData(SwipeEvent event) {
if (_testFlag)
return;
String result = event.getValue();
// hex string message
sendMessage("Final(16)=>% " + result);
if (_snFlag && result.startsWith("31 " + _sn)) {
sendMessage("Written SN successfully!");
_snFlag = false;
} else if (_plainFlag && result.startsWith("6f 6b 3f")) {
sendMessage("Set plain text mode successfully!");
_plainFlag = false;
} else if (_desFlag && result.startsWith("6f 6b 3f")) {
sendMessage("Set 3DES mode successfully!");
_desFlag = false;
} else if (_dukptFlag && result.startsWith("6f 6b 3f")) {
sendMessage("Set DUKPT mode successfully!");
_dukptFlag = false;
} else if (_desFlag && result.startsWith("31 " + _des)) {
_settings.writeMode(Settings.TYPE_3DES);
} else if (_dukptFlag && result.startsWith("31 " + _bdk + " " + _ksn)) {
_settings.writeMode(Settings.TYPE_DUKPT);
// _settings.writeMode(Settings.TYPE_DUKPT_HSM);
}
}
@Override
public void onDisconnected(SwipeEvent event) {
sendMessage("Device is disconnected!");
toggleConnectStatus();
}
@Override
public void onConnected(SwipeEvent event) {
sendMessage("Device is connected!");
checkDevice();
}
@Override
public void onStarted(SwipeEvent event) {
if (_testFlag)
return;
sendMessage("Device is started");
toggleConnectStatus();
}
@Override
public void onStopped(SwipeEvent event) {
if (_testFlag)
return;
sendMessage("Device is stopped");
toggleConnectStatus();
}
});
Button btn = (Button) findViewById(R.id.btnen);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
getApp().setLanguage(iMagPayApp.LOCAL_EN);
Intent intent = new Intent();
intent.setClass(SettingsActivity.this, SettingsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
}
});
btn = (Button) findViewById(R.id.btncn);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
getApp().setLanguage(iMagPayApp.LOCAL_CN);
Intent intent = new Intent();
intent.setClass(SettingsActivity.this, SettingsActivity.class);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
}
});
btn = (Button) findViewById(R.id.btnplaintext);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
_snFlag = false;
_plainFlag = true;
_desFlag = false;
_dukptFlag = false;
_settings.writeMode(Settings.TYPE_PLAINTEXT);
}
});
btn = (Button) findViewById(R.id.btndes);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
_snFlag = false;
_plainFlag = false;
_desFlag = true;
_dukptFlag = false;
_des = "aa aa aa aa bb bb bb bb cc cc cc cc dd dd dd dd";
_settings.writeDESKey(_des);
}
});
btn = (Button) findViewById(R.id.btndukpt);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
_snFlag = false;
_plainFlag = false;
_desFlag = false;
_dukptFlag = true;
_bdk = "aa aa aa aa bb bb bb bb cc cc cc cc dd dd dd dd";
_ksn = "11 22 33 44 55 66 77 00 00 00";
_settings.writeDUKPTKey(_bdk, _ksn);
}
});
btn = (Button) findViewById(R.id.btnwsn);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
_snFlag = true;
_plainFlag = false;
_desFlag = false;
_dukptFlag = false;
_sn = "201309300001";
_sn = _settings.formatSN(_sn);
sendMessage("Writing SN: " + _sn);
_settings.writeSN(_sn);
}
});
btn = (Button) findViewById(R.id.btnrsn);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
sendMessage("SN: " + _settings.getSN());
}
});
btn = (Button) findViewById(R.id.btnclear);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
_handler.powerOff();
_handler.clearEnvironment();
checkDevice();
}
});
btn = (Button) findViewById(R.id.btnback);
btn.setOnClickListener(new OnClickListener() {
public void onClick(View v) {
finishAll();
}
});
}
private void checkDevice() {
// Probe the device off the UI thread: power it on when it is already
// writable, otherwise run the parameter test first. Always refreshes the
// connect indicator before returning.
new Thread(new Runnable() {
@Override
public void run() {
if (!_handler.isConnected()) {
toggleConnectStatus();
return;
}
if (_handler.isPowerOn()) {
// Already powered; just refresh the indicator.
toggleConnectStatus();
return;
}
if (_handler.isWritable()) {
sendMessage("Device is ready");
_handler.powerOn();
} else {
// NOTE(review): _testFlag is assigned false here and again in both
// branches below; presumably this first assignment was meant to be
// 'true' so the swipe listener stays quiet while test() runs -
// confirm before changing.
_testFlag = false;
sendMessage("Please wait! testing parameter now");
if (_handler.test() && _handler.isWritable()) {
_testFlag = false;
sendMessage("Device is ready");
_handler.powerOn();
} else {
_testFlag = false;
sendMessage("Device is not supported or Please close some audio effects(SRS/DOLBY/BEATS/JAZZ/Classic...) and insert device!");
}
}
toggleConnectStatus();
}
}).start();
}
/**
 * Refreshes the connect/disconnect indicator icons on the UI thread after
 * a short delay, based on the current handler state. The two previous
 * duplicated if/else branches are collapsed into a single computed flag.
 */
private void toggleConnectStatus() {
    _ui.postDelayed(new Runnable() {
        @Override
        public void run() {
            // Same short-circuit order as before: power/readable are only
            // queried when the device is connected.
            boolean connected = _handler.isConnected() && _handler.isPowerOn()
                    && _handler.isReadable();
            ImageView connectIcon = (ImageView) findViewById(R.id.connect);
            ImageView disconnectIcon = (ImageView) findViewById(R.id.disconnect);
            connectIcon.setVisibility(connected ? View.VISIBLE : View.INVISIBLE);
            disconnectIcon.setVisibility(connected ? View.INVISIBLE : View.VISIBLE);
        }
    }, 500);
}
/**
 * Forwards a status line to the shared message sink for display.
 *
 * @param msg human-readable status text shown to the user
 */
private void sendMessage(String msg) {
    _msg.sendMessage(msg);
}
/**
 * Re-checks the card reader every time this activity becomes visible, since the
 * device may have been unplugged or powered off while the activity was hidden.
 */
@Override  // Activity lifecycle callback; annotation was missing.
public void onStart() {
    super.onStart();
    checkDevice();
}
/**
 * Powers the reader down before the activity is hidden so it does not drain the
 * battery (the audio-jack reader draws power while on).
 */
@Override  // Activity lifecycle callback; annotation was missing.
public void onStop() {
    _handler.powerOff();
    super.onStop();
}
/** Releases the device handler's resources when the activity is destroyed. */
@Override  // Activity lifecycle callback; annotation was missing.
public void onDestroy() {
    _handler.onDestroy();
    super.onDestroy();
}
}
| |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spanner.v1.stub;
import static com.google.cloud.spanner.v1.SpannerClient.ListSessionsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.ServerStreamingCallSettings;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import com.google.spanner.v1.BatchCreateSessionsRequest;
import com.google.spanner.v1.BatchCreateSessionsResponse;
import com.google.spanner.v1.BeginTransactionRequest;
import com.google.spanner.v1.CommitRequest;
import com.google.spanner.v1.CommitResponse;
import com.google.spanner.v1.CreateSessionRequest;
import com.google.spanner.v1.DeleteSessionRequest;
import com.google.spanner.v1.ExecuteBatchDmlRequest;
import com.google.spanner.v1.ExecuteBatchDmlResponse;
import com.google.spanner.v1.ExecuteSqlRequest;
import com.google.spanner.v1.GetSessionRequest;
import com.google.spanner.v1.ListSessionsRequest;
import com.google.spanner.v1.ListSessionsResponse;
import com.google.spanner.v1.PartialResultSet;
import com.google.spanner.v1.PartitionQueryRequest;
import com.google.spanner.v1.PartitionReadRequest;
import com.google.spanner.v1.PartitionResponse;
import com.google.spanner.v1.ReadRequest;
import com.google.spanner.v1.ResultSet;
import com.google.spanner.v1.RollbackRequest;
import com.google.spanner.v1.Session;
import com.google.spanner.v1.Transaction;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link SpannerStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (spanner.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the total timeout of createSession to 30 seconds:
*
* <pre>{@code
* SpannerStubSettings.Builder spannerSettingsBuilder = SpannerStubSettings.newBuilder();
* spannerSettingsBuilder
* .createSessionSettings()
* .setRetrySettings(
* spannerSettingsBuilder
* .createSessionSettings()
* .getRetrySettings()
* .toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30))
* .build());
* SpannerStubSettings spannerSettings = spannerSettingsBuilder.build();
* }</pre>
*/
@Generated("by gapic-generator-java")
// NOTE(review): this class is GAPIC-generated (@Generated above). Do not hand-edit
// logic here -- regenerate instead. Comments below are orientation only.
public class SpannerStubSettings extends StubSettings<SpannerStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/spanner.data")
          .build();

  // One immutable per-RPC settings object for every Spanner method. Streaming RPCs
  // (executeStreamingSql, streamingRead) use ServerStreamingCallSettings, the paged
  // listSessions RPC uses PagedCallSettings; everything else is unary.
  private final UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings;
  private final UnaryCallSettings<BatchCreateSessionsRequest, BatchCreateSessionsResponse>
      batchCreateSessionsSettings;
  private final UnaryCallSettings<GetSessionRequest, Session> getSessionSettings;
  private final PagedCallSettings<
          ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      listSessionsSettings;
  private final UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings;
  private final UnaryCallSettings<ExecuteSqlRequest, ResultSet> executeSqlSettings;
  private final ServerStreamingCallSettings<ExecuteSqlRequest, PartialResultSet>
      executeStreamingSqlSettings;
  private final UnaryCallSettings<ExecuteBatchDmlRequest, ExecuteBatchDmlResponse>
      executeBatchDmlSettings;
  private final UnaryCallSettings<ReadRequest, ResultSet> readSettings;
  private final ServerStreamingCallSettings<ReadRequest, PartialResultSet> streamingReadSettings;
  private final UnaryCallSettings<BeginTransactionRequest, Transaction> beginTransactionSettings;
  private final UnaryCallSettings<CommitRequest, CommitResponse> commitSettings;
  private final UnaryCallSettings<RollbackRequest, Empty> rollbackSettings;
  private final UnaryCallSettings<PartitionQueryRequest, PartitionResponse> partitionQuerySettings;
  private final UnaryCallSettings<PartitionReadRequest, PartitionResponse> partitionReadSettings;

  // Tells the paging machinery how to thread page tokens/sizes through
  // ListSessions requests and how to extract results from responses.
  private static final PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>
      LIST_SESSIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListSessionsRequest, ListSessionsResponse, Session>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListSessionsRequest injectToken(ListSessionsRequest payload, String token) {
              return ListSessionsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListSessionsRequest injectPageSize(ListSessionsRequest payload, int pageSize) {
              return ListSessionsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListSessionsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListSessionsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Session> extractResources(ListSessionsResponse payload) {
              // Generator-emitted defensive check: normalize to an empty list so
              // callers never see null.
              return payload.getSessionsList() == null
                  ? ImmutableList.<Session>of()
                  : payload.getSessionsList();
            }
          };

  // Builds the paged response future for listSessions from a raw response future.
  private static final PagedListResponseFactory<
          ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      LIST_SESSIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>() {
            @Override
            public ApiFuture<ListSessionsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListSessionsRequest, ListSessionsResponse> callable,
                ListSessionsRequest request,
                ApiCallContext context,
                ApiFuture<ListSessionsResponse> futureResponse) {
              PageContext<ListSessionsRequest, ListSessionsResponse, Session> pageContext =
                  PageContext.create(callable, LIST_SESSIONS_PAGE_STR_DESC, request, context);
              return ListSessionsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createSession. */
  public UnaryCallSettings<CreateSessionRequest, Session> createSessionSettings() {
    return createSessionSettings;
  }

  /** Returns the object with the settings used for calls to batchCreateSessions. */
  public UnaryCallSettings<BatchCreateSessionsRequest, BatchCreateSessionsResponse>
      batchCreateSessionsSettings() {
    return batchCreateSessionsSettings;
  }

  /** Returns the object with the settings used for calls to getSession. */
  public UnaryCallSettings<GetSessionRequest, Session> getSessionSettings() {
    return getSessionSettings;
  }

  /** Returns the object with the settings used for calls to listSessions. */
  public PagedCallSettings<ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
      listSessionsSettings() {
    return listSessionsSettings;
  }

  /** Returns the object with the settings used for calls to deleteSession. */
  public UnaryCallSettings<DeleteSessionRequest, Empty> deleteSessionSettings() {
    return deleteSessionSettings;
  }

  /** Returns the object with the settings used for calls to executeSql. */
  public UnaryCallSettings<ExecuteSqlRequest, ResultSet> executeSqlSettings() {
    return executeSqlSettings;
  }

  /** Returns the object with the settings used for calls to executeStreamingSql. */
  public ServerStreamingCallSettings<ExecuteSqlRequest, PartialResultSet>
      executeStreamingSqlSettings() {
    return executeStreamingSqlSettings;
  }

  /** Returns the object with the settings used for calls to executeBatchDml. */
  public UnaryCallSettings<ExecuteBatchDmlRequest, ExecuteBatchDmlResponse>
      executeBatchDmlSettings() {
    return executeBatchDmlSettings;
  }

  /** Returns the object with the settings used for calls to read. */
  public UnaryCallSettings<ReadRequest, ResultSet> readSettings() {
    return readSettings;
  }

  /** Returns the object with the settings used for calls to streamingRead. */
  public ServerStreamingCallSettings<ReadRequest, PartialResultSet> streamingReadSettings() {
    return streamingReadSettings;
  }

  /** Returns the object with the settings used for calls to beginTransaction. */
  public UnaryCallSettings<BeginTransactionRequest, Transaction> beginTransactionSettings() {
    return beginTransactionSettings;
  }

  /** Returns the object with the settings used for calls to commit. */
  public UnaryCallSettings<CommitRequest, CommitResponse> commitSettings() {
    return commitSettings;
  }

  /** Returns the object with the settings used for calls to rollback. */
  public UnaryCallSettings<RollbackRequest, Empty> rollbackSettings() {
    return rollbackSettings;
  }

  /** Returns the object with the settings used for calls to partitionQuery. */
  public UnaryCallSettings<PartitionQueryRequest, PartitionResponse> partitionQuerySettings() {
    return partitionQuerySettings;
  }

  /** Returns the object with the settings used for calls to partitionRead. */
  public UnaryCallSettings<PartitionReadRequest, PartitionResponse> partitionReadSettings() {
    return partitionReadSettings;
  }

  /**
   * Creates a transport-specific stub from these settings. Only the gRPC
   * transport is supported by this generated surface.
   *
   * @throws UnsupportedOperationException if the configured transport is not gRPC
   */
  @BetaApi("A restructuring of stub classes is planned, so this may break in the future")
  public SpannerStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcSpannerStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return "spanner.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "spanner.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    // Spanner result sets can be large; do not cap inbound message size.
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  @BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(SpannerStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Freezes every per-method builder from {@code settingsBuilder} into immutable settings. */
  protected SpannerStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    createSessionSettings = settingsBuilder.createSessionSettings().build();
    batchCreateSessionsSettings = settingsBuilder.batchCreateSessionsSettings().build();
    getSessionSettings = settingsBuilder.getSessionSettings().build();
    listSessionsSettings = settingsBuilder.listSessionsSettings().build();
    deleteSessionSettings = settingsBuilder.deleteSessionSettings().build();
    executeSqlSettings = settingsBuilder.executeSqlSettings().build();
    executeStreamingSqlSettings = settingsBuilder.executeStreamingSqlSettings().build();
    executeBatchDmlSettings = settingsBuilder.executeBatchDmlSettings().build();
    readSettings = settingsBuilder.readSettings().build();
    streamingReadSettings = settingsBuilder.streamingReadSettings().build();
    beginTransactionSettings = settingsBuilder.beginTransactionSettings().build();
    commitSettings = settingsBuilder.commitSettings().build();
    rollbackSettings = settingsBuilder.rollbackSettings().build();
    partitionQuerySettings = settingsBuilder.partitionQuerySettings().build();
    partitionReadSettings = settingsBuilder.partitionReadSettings().build();
  }

  /** Builder for SpannerStubSettings. */
  public static class Builder extends StubSettings.Builder<SpannerStubSettings, Builder> {
    // All unary/paged builders, collected so applyToAllUnaryMethods can fan out a
    // single update. Deliberately excludes the two server-streaming builders.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings;
    private final UnaryCallSettings.Builder<BatchCreateSessionsRequest, BatchCreateSessionsResponse>
        batchCreateSessionsSettings;
    private final UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings;
    private final PagedCallSettings.Builder<
            ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
        listSessionsSettings;
    private final UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings;
    private final UnaryCallSettings.Builder<ExecuteSqlRequest, ResultSet> executeSqlSettings;
    private final ServerStreamingCallSettings.Builder<ExecuteSqlRequest, PartialResultSet>
        executeStreamingSqlSettings;
    private final UnaryCallSettings.Builder<ExecuteBatchDmlRequest, ExecuteBatchDmlResponse>
        executeBatchDmlSettings;
    private final UnaryCallSettings.Builder<ReadRequest, ResultSet> readSettings;
    private final ServerStreamingCallSettings.Builder<ReadRequest, PartialResultSet>
        streamingReadSettings;
    private final UnaryCallSettings.Builder<BeginTransactionRequest, Transaction>
        beginTransactionSettings;
    private final UnaryCallSettings.Builder<CommitRequest, CommitResponse> commitSettings;
    private final UnaryCallSettings.Builder<RollbackRequest, Empty> rollbackSettings;
    private final UnaryCallSettings.Builder<PartitionQueryRequest, PartitionResponse>
        partitionQuerySettings;
    private final UnaryCallSettings.Builder<PartitionReadRequest, PartitionResponse>
        partitionReadSettings;

    // Named retry-code sets referenced by initDefaults(). All retrying policies
    // retry only on UNAVAILABLE; "no_retry_0_codes" retries on nothing.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_3_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "retry_policy_2_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "retry_policy_1_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "no_retry_0_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry-timing profiles referenced by initDefaults():
    //   retry_policy_3_params -- 30 s total (most unary calls)
    //   retry_policy_2_params -- 60 s total (batchCreateSessions)
    //   retry_policy_1_params -- 1 h total (listSessions, commit)
    //   no_retry_0_params     -- single 1 h attempt (streaming calls)
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(250L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(32000L))
              .setInitialRpcTimeout(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(30000L))
              .setTotalTimeout(Duration.ofMillis(30000L))
              .build();
      definitions.put("retry_policy_3_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(250L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(32000L))
              .setInitialRpcTimeout(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(60000L))
              .setTotalTimeout(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_2_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelay(Duration.ofMillis(250L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelay(Duration.ofMillis(32000L))
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("retry_policy_1_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeout(Duration.ofMillis(3600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeout(Duration.ofMillis(3600000L))
              .setTotalTimeout(Duration.ofMillis(3600000L))
              .build();
      definitions.put("no_retry_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      createSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchCreateSessionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listSessionsSettings = PagedCallSettings.newBuilder(LIST_SESSIONS_PAGE_STR_FACT);
      deleteSessionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      executeSqlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      executeStreamingSqlSettings = ServerStreamingCallSettings.newBuilder();
      executeBatchDmlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      readSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      streamingReadSettings = ServerStreamingCallSettings.newBuilder();
      beginTransactionSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      commitSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      rollbackSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      partitionQuerySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      partitionReadSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      // Streaming builders are intentionally absent from this list.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createSessionSettings,
              batchCreateSessionsSettings,
              getSessionSettings,
              listSessionsSettings,
              deleteSessionSettings,
              executeSqlSettings,
              executeBatchDmlSettings,
              readSettings,
              beginTransactionSettings,
              commitSettings,
              rollbackSettings,
              partitionQuerySettings,
              partitionReadSettings);
      initDefaults(this);
    }

    /** Copy constructor used by {@link SpannerStubSettings#toBuilder()}. */
    protected Builder(SpannerStubSettings settings) {
      super(settings);
      createSessionSettings = settings.createSessionSettings.toBuilder();
      batchCreateSessionsSettings = settings.batchCreateSessionsSettings.toBuilder();
      getSessionSettings = settings.getSessionSettings.toBuilder();
      listSessionsSettings = settings.listSessionsSettings.toBuilder();
      deleteSessionSettings = settings.deleteSessionSettings.toBuilder();
      executeSqlSettings = settings.executeSqlSettings.toBuilder();
      executeStreamingSqlSettings = settings.executeStreamingSqlSettings.toBuilder();
      executeBatchDmlSettings = settings.executeBatchDmlSettings.toBuilder();
      readSettings = settings.readSettings.toBuilder();
      streamingReadSettings = settings.streamingReadSettings.toBuilder();
      beginTransactionSettings = settings.beginTransactionSettings.toBuilder();
      commitSettings = settings.commitSettings.toBuilder();
      rollbackSettings = settings.rollbackSettings.toBuilder();
      partitionQuerySettings = settings.partitionQuerySettings.toBuilder();
      partitionReadSettings = settings.partitionReadSettings.toBuilder();
      // Streaming builders are intentionally absent from this list.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createSessionSettings,
              batchCreateSessionsSettings,
              getSessionSettings,
              listSessionsSettings,
              deleteSessionSettings,
              executeSqlSettings,
              executeBatchDmlSettings,
              readSettings,
              beginTransactionSettings,
              commitSettings,
              rollbackSettings,
              partitionQuerySettings,
              partitionReadSettings);
    }

    // Builds a Builder wired with the default transport, credentials, headers,
    // and endpoints, then applies the per-method retry defaults.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setEndpoint(getDefaultEndpoint());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    // Assigns each RPC its named retry-code set and timing profile (see the
    // static maps above for what each policy tier means).
    private static Builder initDefaults(Builder builder) {
      builder
          .createSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .batchCreateSessionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));
      builder
          .getSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .listSessionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));
      builder
          .deleteSessionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .executeSqlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .executeStreamingSqlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .executeBatchDmlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .readSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .streamingReadSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_0_params"));
      builder
          .beginTransactionSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .commitSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_1_params"));
      builder
          .rollbackSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .partitionQuerySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      builder
          .partitionReadSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_3_params"));
      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createSession. */
    public UnaryCallSettings.Builder<CreateSessionRequest, Session> createSessionSettings() {
      return createSessionSettings;
    }

    /** Returns the builder for the settings used for calls to batchCreateSessions. */
    public UnaryCallSettings.Builder<BatchCreateSessionsRequest, BatchCreateSessionsResponse>
        batchCreateSessionsSettings() {
      return batchCreateSessionsSettings;
    }

    /** Returns the builder for the settings used for calls to getSession. */
    public UnaryCallSettings.Builder<GetSessionRequest, Session> getSessionSettings() {
      return getSessionSettings;
    }

    /** Returns the builder for the settings used for calls to listSessions. */
    public PagedCallSettings.Builder<
            ListSessionsRequest, ListSessionsResponse, ListSessionsPagedResponse>
        listSessionsSettings() {
      return listSessionsSettings;
    }

    /** Returns the builder for the settings used for calls to deleteSession. */
    public UnaryCallSettings.Builder<DeleteSessionRequest, Empty> deleteSessionSettings() {
      return deleteSessionSettings;
    }

    /** Returns the builder for the settings used for calls to executeSql. */
    public UnaryCallSettings.Builder<ExecuteSqlRequest, ResultSet> executeSqlSettings() {
      return executeSqlSettings;
    }

    /** Returns the builder for the settings used for calls to executeStreamingSql. */
    public ServerStreamingCallSettings.Builder<ExecuteSqlRequest, PartialResultSet>
        executeStreamingSqlSettings() {
      return executeStreamingSqlSettings;
    }

    /** Returns the builder for the settings used for calls to executeBatchDml. */
    public UnaryCallSettings.Builder<ExecuteBatchDmlRequest, ExecuteBatchDmlResponse>
        executeBatchDmlSettings() {
      return executeBatchDmlSettings;
    }

    /** Returns the builder for the settings used for calls to read. */
    public UnaryCallSettings.Builder<ReadRequest, ResultSet> readSettings() {
      return readSettings;
    }

    /** Returns the builder for the settings used for calls to streamingRead. */
    public ServerStreamingCallSettings.Builder<ReadRequest, PartialResultSet>
        streamingReadSettings() {
      return streamingReadSettings;
    }

    /** Returns the builder for the settings used for calls to beginTransaction. */
    public UnaryCallSettings.Builder<BeginTransactionRequest, Transaction>
        beginTransactionSettings() {
      return beginTransactionSettings;
    }

    /** Returns the builder for the settings used for calls to commit. */
    public UnaryCallSettings.Builder<CommitRequest, CommitResponse> commitSettings() {
      return commitSettings;
    }

    /** Returns the builder for the settings used for calls to rollback. */
    public UnaryCallSettings.Builder<RollbackRequest, Empty> rollbackSettings() {
      return rollbackSettings;
    }

    /** Returns the builder for the settings used for calls to partitionQuery. */
    public UnaryCallSettings.Builder<PartitionQueryRequest, PartitionResponse>
        partitionQuerySettings() {
      return partitionQuerySettings;
    }

    /** Returns the builder for the settings used for calls to partitionRead. */
    public UnaryCallSettings.Builder<PartitionReadRequest, PartitionResponse>
        partitionReadSettings() {
      return partitionReadSettings;
    }

    @Override
    public SpannerStubSettings build() throws IOException {
      return new SpannerStubSettings(this);
    }
  }
}
| |
/*
* Copyright (c) 2005-2013 Jyoti Parwatikar
* and Washington University in St. Louis
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/*
* File: Reservation.java
* Author: Jyoti Parwatikar
* Email: jp@arl.wustl.edu
* Organization: Washington University
*
* Derived from: none
*
* Date Created: 5/31/2005
*
* Description:
*
* Modification History:
*
*/
import javax.swing.*;
import java.awt.event.*;
import java.io.*;
import java.util.*;
import java.awt.GridLayout;
import java.text.*;
//import javax.swing.text.*;
import java.awt.Font;
import javax.swing.event.*;
//import java.beans.*;
//import javax.accessibility.AccessibleContext;
class Reservation implements NCCPConnection.ConnectionListener
{
// Status/error codes exchanged with the reservation daemon. Values must stay in
// sync with the server side -- add new codes here as they are defined there.
public static final int AUTH_ERR = 1;
public static final int NO_RES_ERR = 2;
public static final int ALLOC_ERR = 3;
public static final int EXPIRED = 4;
public static final int ALERT = 5;
public static final int ACK = 8;
// Coordinator that owns this reservation; supplies credentials and the current
// experiment when a request is sent.
private ExpCoordinator expCoordinator = null;
// Earliest and latest acceptable start times for the requested reservation window.
private GregorianCalendar earlyStartTime = null;
private GregorianCalendar lateStartTime = null;
private double duration = 0; //duration in minutes
// Formatter used to serialize the start times onto the wire.
private SimpleDateFormat dateFormat = null;
// Shared timezone the daemon expects timestamps in (named "cst"; initialized elsewhere).
protected static TimeZone cstTimeZone = null;
// Monotonic counter used to generate unique request markers.
protected static int index = 0;
// Outstanding reservation request, if any; null when idle.
private NCCPRequest pendingRequest = null;
////////////////////////////////////////////// Reservation.Reservable interface /////////////////////////////////////////////////////////////////////
/**
 * Implemented by components that can be covered by a reservation; exposes
 * whether the component currently holds one.
 */
public interface Reservable
{
    /** Returns true if this component is currently covered by a reservation. */
    public boolean isReserved();
}
////////////////////////////////////////////// Reservation.Reservable interface /////////////////////////////////////////////////////////////////////
/**
 * Callback interface for the outcome of a reservation request. Exactly one of
 * the two methods is invoked when the daemon responds.
 */
public interface ReservationResponder
{
    /** Invoked on success; {@code errorcode} is the daemon's status code, {@code d} the granted start time. */
    public void processSuccess(double errorcode, Date d);
    /** Invoked on failure with the daemon's code, message text, and associated time. */
    public void processError(double errorcode, String errormsg, Date d);
}
/////////////////////////////////////////////////////// NCCPRequest ///////////////////////////////////////////////////////////////////////
// One reservation-related NCCP exchange (make, extend, or cancel -- distinguished by
// the concrete 'request' object). Owns the per-component topology sub-requests that
// are sent after the daemon ACKs a new reservation.
private class NCCPRequest extends ExpRequest
{
private Vector<Reservable> topologyRequests; // per-cluster/node/link sub-requests awaiting reply
private String ref_id = ""; // reservation reference id assigned by the daemon in its ACK
private ReservationResponder rresponder = null; // optional callback; null => interactive dialogs
/////////////////////////////////////////////// NCCP_Requester /////////////////////////////////////////////////////////////
// Outgoing "make reservation" message: credentials, early/late start (formatted in
// CST via the shared dateFormat), duration in whole minutes, and the number of
// configured objects in the current experiment.
private class NCCP_Requester extends ExpRequest.RequesterBase
{
public NCCP_Requester()
{
super(ExpDaemon.NCCP_Operation_ReservationRequest, true);
// unique marker so the daemon's reply can be matched back to this request
setMarker(new REND_Marker_class(ExpDaemon.NCCP_Operation_ReservationRequest, getNextIndex()));
}
// Serializes the request payload; field order is the wire protocol -- do not reorder.
public void storeData(DataOutputStream dout) throws IOException
{
//first change time to UTC or GMT
//long early = earlyStartTime.getTimeInMillis();
//long late = lateStartTime.getTimeInMillis();
//ExpCoordinator.print(new String("Reservation.NCCPRequest.NCCP_Requester.storeData earlyStartTime: " + early + "ms lateStartTime: " + late + "ms duration: " + duration + "min"), 2);
// Re-express both start times in the daemon's time zone (CST) before formatting.
GregorianCalendar tmp_cal = new GregorianCalendar(cstTimeZone);
tmp_cal.setTime(earlyStartTime.getTime());
String early = dateFormat.format(tmp_cal.getTime());
tmp_cal.setTime(lateStartTime.getTime());
String late = dateFormat.format(tmp_cal.getTime());
ExpCoordinator.print(new String("Reservation.NCCPRequest.NCCP_Requester.storeData earlyStartTime: " + early + " lateStartTime: " + late + " duration: " + duration + "min"), 2);
ONL.NCCPWriter nccp_writer = new ONL.NCCPWriter(dout);
//nccp_writer.writeString("experiment");
nccp_writer.writeString(expCoordinator.getProperty(ExpCoordinator.USERNAME));
nccp_writer.writeString(expCoordinator.getProperty(ExpCoordinator.PASSWORD));
//expCoordinator.getCurrentExp().writeTopologySummary(nccp_writer);
nccp_writer.writeString(early);
nccp_writer.writeString(late);
nccp_writer.writeInt((int)duration); // truncates fractional minutes
nccp_writer.writeInt(expCoordinator.getCurrentExp().getNumCfgObjs());
}
}//end inner class Reservation.NCCPRequest.NCCP_Requester
/////////////////////////////////////////////// NCCP_Response //////////////////////////////////////////////////////////////
// Incoming reply to any reservation operation: an error/info message, a numeric
// error code (bit flags, see class constants), and the granted start time as a
// string in the shared dateFormat ("yyyyMMddHHmmss", CST).
private class NCCP_Response extends NCCP.ResponseBase
{
private String errorMsg;
private int errorCode;
private String startTime = null;
public NCCP_Response() { super(4);} // NOTE(review): 4 presumably a field/length count -- confirm against NCCP.ResponseBase
public void retrieveData(DataInput din) throws IOException
{
// Wire order: message string, error code, start-time string.
errorMsg = NCCP.readString(din);
errorCode = din.readInt();
startTime = NCCP.readString(din);
if (status != NCCP.Status_Fine)
ExpCoordinator.printer.print(new String("Reservation.NCCPRequest.NCCP_Response.retrieveData msg = " + errorMsg), 2);
}
public void retrieveFieldData(DataInput din) throws IOException {} // no per-field data for this message
public double getData(MonitorDataType.Base mdt) { return 0;} // not a monitoring response
public String getErrorMessage() { return errorMsg;}
public int getErrorCode() { return errorCode;}
// Parses the daemon-supplied start time; returns null when the string is absent
// or malformed (callers must handle null).
public Date getStartTime()
{
try
{
return (dateFormat.parse(startTime));
}
catch (ParseException e)
{
return null;
}
}
}//end inner class Reservation.NCCPRequest.NCCP_Response
/////////////////////////////////////////////// NCCP_EndReservationReq /////////////////////////////////////////////////////////////
// Terminating message sent after all per-component reservation sub-requests:
// tells the daemon the topology for reservation 'refid' is complete.
private class NCCP_EndReservationReq extends NCCP.RequesterBase
{
private String refid = ""; // reservation reference id the daemon assigned in its ACK
public NCCP_EndReservationReq(String ref)
{
super(ExpDaemon.NCCP_Operation_EndReservationReq);
setMarker(new REND_Marker_class(ExpDaemon.NCCP_Operation_EndReservationReq, getNextIndex()));
refid = new String(ref);
}
public void storeData(DataOutputStream dout) throws IOException
{
NCCP.writeString(refid, dout);
//NCCP.writeComponentID(null, dout);
}
}//end inner class Reservation.NCCPRequest.NCCP_Requester
/////////////////////////////////////////////////////////////////////////////////
// Wraps a cancel request; the concrete requester type is later used by
// processResponse (instanceof) to choose how to report the result.
public NCCPRequest(NCCPCancelReq req)
{
request = req;
response = new NCCP_Response();
topologyRequests = new Vector<Reservable>();
}
// Wraps an extend-reservation request.
public NCCPRequest(NCCPExtensionReq req)
{
request = req;
response = new NCCP_Response();
topologyRequests = new Vector<Reservable>();
}
// Default: a new "make reservation" request (interactive result reporting).
public NCCPRequest()
{
request = new NCCP_Requester();
response = new NCCP_Response();
topologyRequests = new Vector<Reservable>();
}
// "Make reservation" with programmatic result delivery instead of dialogs.
public NCCPRequest(ReservationResponder rr)
{
this();
rresponder = rr;
}
// Dispatches the daemon's reply. Flow:
//   status fine  -> settle outstanding topology sub-requests, then report success
//                   (dialog or responder) according to the request type;
//   AUTH_ERR     -> re-prompt for credentials and resend this same request;
//   ACK          -> record the assigned ref_id and stream the topology
//                   (clusters, nodes, links) followed by an end-reservation message;
//   otherwise    -> report the error.
// 'remove' controls whether this request is retired from the coordinator; it stays
// alive across the AUTH_ERR retry and the ACK -> final-response round trip.
public void processResponse(NCCP.ResponseBase r)
{
boolean remove = true;
int i =0;
int max = 0;
Date d = null;
String errormsg = "";
if (r.getStatus() == NCCP.Status_Fine)
{
d = ((NCCP_Response)r).getStartTime(); // granted start time; may be null if unparseable
//check requests were all answered
//remove all requests from ExpCoordinator
Vector<Reservable> failedRequests = new Vector<Reservable>();
max = topologyRequests.size();
Reservable req;
for (i = 0; i < max; ++i)
{
req = (Reservable)topologyRequests.elementAt(i);
if (!req.isReserved()) failedRequests.add(req);
expCoordinator.removeRequest((ExpRequest)req);
}
// Some components failed to reserve: overall status is fine but the topology
// is incomplete -- surface that as an error.
if (failedRequests.size() > 0)
{
errormsg = "Incomplete Topology Reserved.";
JOptionPane.showMessageDialog(expCoordinator.getMainWindow(),
new Object[]{errormsg},
"Reservation Error",
JOptionPane.ERROR_MESSAGE);
failedRequests.clear();
if (rresponder != null)
rresponder.processError(r.getStatus(), errormsg, d);
}
topologyRequests.clear();
// Report according to which operation this exchange carried.
if (request instanceof NCCPExtensionReq)
{
errormsg = ((NCCP_Response)r).getErrorMessage();
JOptionPane.showMessageDialog(ExpCoordinator.getMainWindow(),
new Object[]{errormsg},
"Reservation",
JOptionPane.PLAIN_MESSAGE);
}
else if (request instanceof NCCPCancelReq)
{
errormsg = ((NCCP_Response)r).getErrorMessage();
JOptionPane.showMessageDialog(ExpCoordinator.getMainWindow(),
new Object[]{((NCCP_Response)r).getErrorMessage()},//"Cancel Reservation Succeeded"},
"Reservation",
JOptionPane.PLAIN_MESSAGE);
}
else
{
// New reservation succeeded: deliver via responder if present, else dialog.
errormsg = ((NCCP_Response)r).getErrorMessage();
String date_time = "";
if (rresponder != null)
rresponder.processSuccess(r.getStatus(), d);
else
{
if (d != null) date_time = DateFormat.getDateTimeInstance().format(d);
JOptionPane.showMessageDialog(ExpCoordinator.getMainWindow(),
new Object[]{((NCCP_Response)r).getErrorMessage(), (new String("Reservation starts " + date_time))},
"Reservation",
JOptionPane.PLAIN_MESSAGE);
}
}
}
else
{
// Authentication failure: interactively re-prompt and resend; with a
// responder there is no UI, so just report the error.
if ((((NCCP_Response)r).getErrorCode() & AUTH_ERR) > 0)
{
if (rresponder == null)
{
expCoordinator.clearPassword();
expCoordinator.getUserInfo("Authentication Failure");
remove = false; // keep this request registered for the retry
expCoordinator.sendRequest(this);
}
else
{
rresponder.processError(r.getStatus(), "Authentication Failure", null);
}
}
else
{
// ACK: daemon accepted the reservation shell; its "error message" field
// carries the assigned reservation id. Now send the topology.
if ((((NCCP_Response)r).getErrorCode() & ACK) > 0)
{
String oref_id = ref_id;
ref_id = new String(((NCCP_Response)r).getErrorMessage());
ExpCoordinator.print(new String("Reservation.NCCPRequest.processResponse got ACK for reservation id:" + ref_id + " set from " + oref_id));
remove = false; // stay alive until the final reservation response arrives
//form and send requests
Topology topo = expCoordinator.getTopology();
Vector clusters = topo.getClusters();
ONLComponentList cl;
max = clusters.size();
Cluster.Instance cluster;
Experiment ex = expCoordinator.getCurrentExp();
Cluster.Instance.NCCP_AddCluster addcluster;
// Clusters first, then nodes, then links; each sub-request is tagged with
// ref_id and tracked in topologyRequests for the success check above.
for (i = 0; i < max; ++i)
{
cluster = (Cluster.Instance)clusters.elementAt(i);
ExpCoordinator.print(new String("Reservation.NCCPRequest.processResponse send cluster " + cluster.getTypeLabel() + " ref " + cluster.getIndex() + " i = " + i), 6);
addcluster = new Cluster.Instance.NCCP_AddCluster(cluster, ex);
addcluster.setReservation(ref_id, ExpDaemon.NCCP_Operation_ReservationCluster);
topologyRequests.add(addcluster);
expCoordinator.sendRequest(addcluster);
}
cl = topo.getNodes();
max = cl.getSize();
ONLComponent c;
Experiment.NCCP_AddComponent addc;
for (i = 0; i < max; ++i)
{
c = cl.onlComponentAt(i);
addc = new Experiment.NCCP_AddComponent(c,ex);
addc.setReservation(ref_id, ExpDaemon.NCCP_Operation_ReservationComponent);
topologyRequests.add(addc);
expCoordinator.sendRequest(addc);
}
cl = topo.getLinks();
max = cl.getSize();
LinkDescriptor.NCCP_AddLink addl;
for (i = 0; i < max; ++i)
{
c = cl.onlComponentAt(i);
addl = new LinkDescriptor.NCCP_AddLink(((LinkDescriptor)c),ex);
addl.setReservation(ref_id, ExpDaemon.NCCP_Operation_ReservationLink);
topologyRequests.add(addl);
expCoordinator.sendRequest(addl);
}
//end with end reservation operation
expCoordinator.sendMessage(new NCCP_EndReservationReq(ref_id));
}
else
{
// Hard failure: report the daemon's message.
errormsg = ((NCCP_Response)r).getErrorMessage();
if (rresponder != null)
{
rresponder.processError(r.getStatus(), errormsg, null);
}
else
{
JOptionPane.showMessageDialog(expCoordinator.getMainWindow(),
new Object[]{errormsg},
"Reservation Error",
JOptionPane.ERROR_MESSAGE);
}
}
}
}
if (remove || (request instanceof NCCPCancelReq))
expCoordinator.removeRequest(this);
}
}//end inner class Reservation.NCCPRequest
///////////////////////////////////////////////// Action ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Menu action "Make Reservation": opens the interactive reservation dialog,
// refusing when the current experiment has no topology.
public static class Action extends AbstractAction implements ExpCoordinator.MenuAction
{
private Reservation reservation; // this action owns its own Reservation instance
private ExpCoordinator expCoordinator = null;
public Action(ExpCoordinator ec)
{
super("Make Reservation");
reservation = new Reservation(ec);
expCoordinator = ec;
}
public void actionPerformed(ActionEvent e)
{
if (!expCoordinator.getCurrentExp().isEmpty()) reservation.getReservation();
else JOptionPane.showMessageDialog(expCoordinator.getMainWindow(),
"You need to specify a topology.",
"Reservation Error",
JOptionPane.ERROR_MESSAGE);
}
//ExpCoordinator.MenuAction interface
public int getType() { return ExpCoordinator.RESERVATION;}
public boolean isType(int t) { return (t == ExpCoordinator.RESERVATION);}
//end ExpCoordinator.MenuAction interface
}//end inner class Reservation.Action
///////////////////////////////////////////////// ExtendAction ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Menu action "Extend Current Reservation": opens the extension-duration dialog.
public static class ExtendAction extends AbstractAction implements ExpCoordinator.MenuAction
{
private Reservation reservation; // this action owns its own Reservation instance
private ExpCoordinator expCoordinator = null;
public ExtendAction(ExpCoordinator ec)
{
super("Extend Current Reservation");
//setEnabled(false);
reservation = new Reservation(ec);
expCoordinator = ec;
}
public void actionPerformed(ActionEvent e)
{
reservation.getExtension();
}
//ExpCoordinator.MenuAction interface
public int getType() { return ExpCoordinator.EXT_RESERVATION;}
public boolean isType(int t) { return (t == ExpCoordinator.EXT_RESERVATION);}
//end ExpCoordinator.MenuAction interface
}//end inner class Reservation.ExtendAction
///////////////////////////////////////////////// CancelAction ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Menu action "Cancel Current Reservation": confirms with the user, then sends
// the cancel request.
public static class CancelAction extends AbstractAction implements ExpCoordinator.MenuAction
{
private Reservation reservation; // this action owns its own Reservation instance
private ExpCoordinator expCoordinator = null;
public CancelAction(ExpCoordinator ec)
{
super("Cancel Current Reservation");
//setEnabled(false);
reservation = new Reservation(ec);
expCoordinator = ec;
}
public void actionPerformed(ActionEvent e)
{
int rtn = JOptionPane.showConfirmDialog(ExpCoordinator.getMainWindow(),
new String("Do you wish to cancel your current reservation?"),
new String("Cancel Reservation"),
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE);
if (rtn == JOptionPane.YES_OPTION)
{
reservation.cancelReservation();
}
}
//ExpCoordinator.MenuAction interface
public int getType() { return ExpCoordinator.CANCEL_RESERVATION;}
public boolean isType(int t) { return (t == ExpCoordinator.CANCEL_RESERVATION);}
//end ExpCoordinator.MenuAction interface
}//end inner class Reservation.CancelAction
//////////////////////////////////////////////// DateField/////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Swing widget for picking a date/time with combo boxes (month, day, year, 12-hour
// clock, minutes in 15-minute steps, AM/PM). Edits are written back into the
// GregorianCalendar passed to the constructor via computeTime().
// NOTE(review): the day combo always offers 1-31 regardless of month/year, and
// computeTime() does not clear the seconds field of 'date' -- confirm both are acceptable.
public static class DateField extends JPanel
{
private GregorianCalendar date = null; // calendar being edited; owned by the caller
private JComboBox month = null;
private JComboBox day = null;
private JComboBox hour = null;
private JComboBox min = null;
private JComboBox year = null;
private JComboBox am_pm = null;
public DateField(GregorianCalendar d) { this(d, false);}
// 'later' shifts the preselected hour one forward (used for a "latest start" field).
public DateField(GregorianCalendar d, boolean later)
{
super();
setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
date = d;
// Combo index is 0-based, matching Calendar.MONTH's 0-based months even though
// the displayed labels are "1".."12".
month = new JComboBox(new Object[]{"1","2","3","4","5","6","7","8","9","10","11","12"});
month.setSelectedIndex(date.get(Calendar.MONTH));//-1);
day = new JComboBox(new Object[]{"1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18","19","20","21","22","23","24","25","26","27","28","29","30","31"});
day.setSelectedIndex(date.get(Calendar.DAY_OF_MONTH)-1);
int curr_year = date.get(Calendar.YEAR);
// Map current minutes onto the 15-minute slots: [0,15)->0, [15,30)->1, [30,45)->2, [45,60)->3.
int tmp_min = date.get(Calendar.MINUTE);
int curr_min = 0;
if (tmp_min >= 15) ++curr_min;
if (tmp_min >= 30) ++curr_min;
if (tmp_min >= 45) ++curr_min;
year = new JComboBox(new Object[]{String.valueOf(curr_year), String.valueOf(curr_year + 1)});
// 12-hour clock: index 0 displays "12" so indices line up with Calendar.HOUR (0-11).
hour = new JComboBox(new Object[]{"12","1","2","3","4","5","6","7","8","9","10","11"});
int curr_hr = date.get(Calendar.HOUR);
boolean am = (date.get(Calendar.AM_PM) == Calendar.AM);
if (later)
{
++curr_hr;
if (curr_hr > 11) // wrapped past 11 -> flip AM/PM
{
curr_hr = 0;
am = !am;
}
}
hour.setSelectedIndex(curr_hr);
min = new JComboBox(new Object[]{"0","15","30","45"});
min.setSelectedIndex(curr_min);
am_pm = new JComboBox(new Object[]{"AM", "PM"});
if (!am) am_pm.setSelectedIndex(1);
// Lay out each combo under a small plain-font label, left to right.
JPanel tmp_panel = new JPanel();
JLabel tmp_lbl = null;
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_lbl = new JLabel("month");
Font f = tmp_lbl.getFont().deriveFont(Font.PLAIN);
tmp_lbl.setFont(f);
tmp_panel.add(tmp_lbl);
tmp_panel.add(month);
add(tmp_panel);
tmp_panel = new JPanel();
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_lbl = new JLabel("day");
tmp_lbl.setFont(f);
tmp_panel.add(tmp_lbl);
tmp_panel.add(day);
add(tmp_panel);
tmp_panel = new JPanel();
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_lbl = new JLabel("year");
tmp_lbl.setFont(f);
tmp_panel.add(tmp_lbl);
tmp_panel.add(year);
add(tmp_panel);
tmp_panel = new JPanel();
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_lbl = new JLabel("hour");
tmp_lbl.setFont(f);
tmp_panel.add(tmp_lbl);
tmp_panel.add(hour);
add(tmp_panel);
tmp_panel = new JPanel();
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_lbl = new JLabel("min");
tmp_lbl.setFont(f);
tmp_panel.add(tmp_lbl);
tmp_panel.add(min);
add(tmp_panel);
tmp_panel = new JPanel();
tmp_panel.setLayout(new GridLayout(2, 1));
tmp_panel.add(new JLabel(""));
tmp_panel.add(am_pm);
add(tmp_panel);
}
// Writes the widget state back into 'date'. Converts the 12-hour selection to a
// 24-hour value: PM adds 12 (index 0 = "12" -> noon); AM leaves 0-11 (index 0 = "12" -> midnight).
public void computeTime()
{
ExpCoordinator.print(new String("computeTime: " + date.get(Calendar.MONTH) + " " + date.get(Calendar.DAY_OF_MONTH) + " " + date.get(Calendar.YEAR) + " " + date.get(Calendar.HOUR) + " " + date.get(Calendar.MINUTE) + " " + date.get(Calendar.AM_PM) + " ms:" + date.getTimeInMillis()), 2);
int hr = hour.getSelectedIndex();
if (am_pm.getSelectedIndex() == 1)// && hr != 12))
{
ExpCoordinator.print(new String(" is pm hr:" + hr), 2);
hr += 12;
}
if (am_pm.getSelectedIndex() == 0)// && hr == 12))
{
ExpCoordinator.print(new String(" is am hr:" + hr), 2);
//hr = 0;
}
date.set(Integer.parseInt((String)year.getSelectedItem()),
(month.getSelectedIndex()), // + 1),
(day.getSelectedIndex() + 1),
hr,
(min.getSelectedIndex() * 15));
ExpCoordinator.print(new String(" after setting: " + date.get(Calendar.MONTH) + " " + date.get(Calendar.DAY_OF_MONTH) + " " + date.get(Calendar.YEAR) + " " + date.get(Calendar.HOUR_OF_DAY) + " " + date.get(Calendar.MINUTE) + " " + date.get(Calendar.AM_PM) + " ms:" + date.getTimeInMillis()), 2);
}
}//end inner class DateField
// Swing widget for entering a duration as whole hours (free-text, integer-formatted)
// plus minutes in 15-minute steps.
public static class DurationField extends JPanel
{
private JFormattedTextField hours = null; // integer-formatted; may display grouping separators
private JComboBox min = null;             // "0","15","30","45"
// Creates the field preset to h hours, 0 minutes.
public DurationField(int h)
{
this();
hours.setValue(h);
}
public DurationField()
{
super();
setLayout(new BoxLayout(this, BoxLayout.X_AXIS));
JLabel tmp_lbl = new JLabel("hours:");
Font f = tmp_lbl.getFont().deriveFont(Font.PLAIN);
tmp_lbl.setFont(f);
add(tmp_lbl);
NumberFormat nf = NumberFormat.getIntegerInstance();
//nf.setParseIntegerOnly(true);
//((DecimalFormat)nf).setNegativePrefix("");
hours = new TextFieldPlus.NumberTextField(nf);
hours.setFocusLostBehavior(JFormattedTextField.COMMIT_OR_REVERT);
add(hours);
min = new JComboBox(new Object[]{"0", "15", "30", "45"});
tmp_lbl = new JLabel("min:");
tmp_lbl.setFont(f);
add(tmp_lbl);
add(min);
}
// Returns the selected duration in minutes (hours * 60 + minute slot).
// Fix: the field is built on a locale integer NumberFormat, so its text can carry
// grouping separators (e.g. "1,000"); Integer.parseInt would throw
// NumberFormatException on that. Parse with the same kind of integer format
// instead, and treat unparseable text as zero hours rather than crashing the dialog.
public int getDuration()
{
int tmp_min = min.getSelectedIndex() * 15;
String hrs_text = hours.getText().trim();
if (hrs_text.length() > 0)
{
try
{
tmp_min += (NumberFormat.getIntegerInstance().parse(hrs_text).intValue() * 60);
}
catch (ParseException ignored)
{
// unparseable hours text contributes nothing; minutes selection still counts
}
}
return tmp_min;
}
}//end inner class DurationField
///////////////////////////////////////////////// NCCPCancelReq ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Outgoing "cancel reservation" message: just the user's credentials; the daemon
// resolves which reservation to cancel from the account.
private class NCCPCancelReq extends ExpRequest.RequesterBase//ONLUndoManager.NCCP_Requester
{
public NCCPCancelReq()
{
super(ExpDaemon.NCCP_Operation_CancelRes, true);
setMarker(new REND_Marker_class(ExpDaemon.NCCP_Operation_CancelRes, getNextIndex()));
}
public void storeData(DataOutputStream dout) throws IOException
{
ExpCoordinator.print("Reservation.NCCPCancelReq.storeData", 4);
ONL.NCCPWriter nccp_writer = new ONL.NCCPWriter(dout);
nccp_writer.writeString(expCoordinator.getProperty(ExpCoordinator.USERNAME));
nccp_writer.writeString(expCoordinator.getProperty(ExpCoordinator.PASSWORD));
}
}//end inner class NCCPCancelReq
///////////////////////////////////////////////// NCCPExtensionReq ///////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Outgoing "extend reservation" message: credentials plus the requested extension
// in minutes. NOTE(review): the Experiment parameter is accepted but never used.
private class NCCPExtensionReq extends ExpRequest.RequesterBase
{
private int minutes = 0; // extension length in minutes
public NCCPExtensionReq(Experiment e, int min)
{
super(ExpDaemon.NCCP_Operation_ResExtension, true);
minutes = min;
setMarker(new REND_Marker_class(ExpDaemon.NCCP_Operation_ResExtension, getNextIndex()));
}
public void storeData(DataOutputStream dout) throws IOException
{
//super.storeData(dout);
NCCP.writeString(ExpCoordinator.theCoordinator.getProperty(ExpCoordinator.USERNAME), dout);
NCCP.writeString(ExpCoordinator.theCoordinator.getProperty(ExpCoordinator.PASSWORD), dout);
dout.writeInt(minutes);
}
}//end inner class NCCPExtensionReq
// Binds this Reservation to a coordinator and prepares the shared CST time zone
// and the "yyyyMMddHHmmss" wire date format used by all requests/responses.
public Reservation(ExpCoordinator ec)
{
expCoordinator = ec;
earlyStartTime = new GregorianCalendar();
lateStartTime = new GregorianCalendar();
ExpCoordinator.print(new String("Reservation earlyStartTime timezone " + earlyStartTime.getTimeZone().getID()));
// Lazily initialize the shared (static) daemon time zone on first construction.
if (cstTimeZone == null)
{
cstTimeZone = TimeZone.getTimeZone("America/Chicago");
//TimeZone tz = earlyStartTime.getTimeZone();
ExpCoordinator.print(new String("timezone offset: " + cstTimeZone.getRawOffset() + " id:" + cstTimeZone.getID()), 2);
}
dateFormat = new SimpleDateFormat("yyyyMMddHHmmss");
dateFormat.setTimeZone(cstTimeZone); // format/parse wire times in the daemon's zone
}
// Seeds both window edges with the same instant; callers widen the window later.
public void setStartTime(Date d) //used to set initial time for reservation
{
earlyStartTime.setTime(d);
lateStartTime.setTime(d);
}
// Shared wire date format ("yyyyMMddHHmmss", CST). SimpleDateFormat is not
// thread-safe; callers must not format/parse concurrently.
public SimpleDateFormat getDateFormat() { return dateFormat;}
// Interactive entry point: opens the "Make Reservation" dialog on the main window.
public void getReservation() { getReservation(expCoordinator.getMainWindow(), "Make Reservation");}
// Shows the make-reservation dialog (earliest start, start range, duration) and,
// on Enter, fills earlyStartTime/lateStartTime/duration from the widgets and
// sends a new reservation request.
public void getReservation(JFrame window, String ttl)
{
earlyStartTime.setTime(new Date()); // default earliest start = now
//lateStartTime.setTime(new Date());
duration = 60; // default, overwritten from the dialog on Enter
DateField early = new DateField(earlyStartTime);
//DateField late = new DateField(lateStartTime, true);
DurationField late = new DurationField(1); // start-range entry, default 1 hour
DurationField dur = new DurationField();
JLabel lbl1 = new JLabel("Earliest Start:");
JLabel lbl2 = new JLabel("Range of start from earliest:");
JLabel lbl3 = new JLabel("Duration of experiment:");
Object[] objectArray = {lbl1, early, lbl2, late, lbl3, dur};
final String opt0 = "Enter";
final String opt1 = "Cancel";
Object[] options = {opt0,opt1};
int rtn = JOptionPane.showOptionDialog(window,
objectArray,
ttl,
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE,
null,
options,
options[0]);
if (rtn == JOptionPane.YES_OPTION)
{
duration = dur.getDuration();
early.computeTime(); //sets earlyStartTime from user input
// lateStartTime = earlyStartTime + user-entered range
int tmp = late.getDuration();
lateStartTime.setTime(earlyStartTime.getTime());
lateStartTime.add(Calendar.MINUTE, tmp); //set lateStartTime from user input
//late.computeTime(); //set lateStartTime from user input
expCoordinator.getUserInfo(); //make sure we have username and password
print(2);
sendRequest(new NCCPRequest());
}
}
// Programmatic (no-dialog) reservation: window is [early, early + range minutes],
// result is delivered via the ReservationResponder callback.
public void getReservation(GregorianCalendar early, int range, double dur, ReservationResponder rr)
{
earlyStartTime.setTime(early.getTime());
lateStartTime.setTime(earlyStartTime.getTime());
lateStartTime.add(Calendar.MINUTE, range);
duration = dur;
print(2);
sendRequest(new NCCPRequest(rr));
}
// Sends a cancel request for the current user's reservation (no confirmation here;
// CancelAction asks the user first).
public void cancelReservation()
{
expCoordinator.getUserInfo(); //make sure we have username and password
print(2);
sendRequest(new NCCPRequest(new NCCPCancelReq()));
}
// Interactive entry point for extending the current reservation.
public void getExtension() { getExtension("Extend Current Reservation");}
// Shows an extension-duration dialog and, on Enter, sends an extension request
// for that many minutes.
public void getExtension(String ttl)
{
JFrame window = expCoordinator.getMainWindow();
JLabel lbl = new JLabel("Extend by time:");
DurationField dur = new DurationField();
Object[] objectArray = {lbl, dur};
final String opt0 = "Enter";
final String opt1 = "Cancel";
Object[] options = {opt0,opt1};
dur.setSize(200, dur.getHeight());
int rtn = JOptionPane.showOptionDialog(window,
objectArray,
ttl,
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE,
null,
options,
options[0]);
expCoordinator.print(new String("Reservation.getExtension Durations w=" + dur.getWidth()));
if (rtn == JOptionPane.YES_OPTION)
{
duration = dur.getDuration();
expCoordinator.getUserInfo(); //make sure we have username and password
print(2);
sendRequest(new NCCPRequest(new NCCPExtensionReq(expCoordinator.getCurrentExp(), (int)duration)));
}
}
// Sends the request immediately when connected; otherwise parks it in
// pendingRequest, registers as a connection listener, and initiates a connect
// (connectionOpened flushes the parked request).
// NOTE(review): a second call while disconnected silently overwrites an earlier
// pending request -- confirm that is the intended behavior.
private void sendRequest(NCCPRequest nccp_req)
{
ExpCoordinator.print("Reservation.sendRequest",2);
if (!expCoordinator.isConnected())
{
if (pendingRequest == null)
{
ExpCoordinator.print(" adding self as connection listener",2);
expCoordinator.addConnectionListener(this);
}
pendingRequest = nccp_req;
// if (!expCoordinator.connect())
expCoordinator.connect();
}
else
{
ExpCoordinator.print(" adding request to expDaemon",2);
expCoordinator.sendRequest(nccp_req);
}
}
// Accessors expose the live calendars (not copies); mutations affect this Reservation.
public GregorianCalendar getEarlyStartTime() { return earlyStartTime;}
public GregorianCalendar getLateStartTime() { return lateStartTime;}
public double getDuration() { return duration;} //in minutes
// Debug dump of the current window/duration at verbosity level l.
public void print(int l)
{
ExpCoordinator.print(new String("Reservation earlyStartTime: " + earlyStartTime.getTime().getTime() + " ms lateStartTime: " + lateStartTime.getTime().getTime() + " ms duration: " + duration + " min"), l);
}
//NCCPConnection.ConnectionListener
// Failure/close are deliberate no-ops: a parked pendingRequest simply never sends.
public void connectionFailed(NCCPConnection.ConnectionEvent e){}
public void connectionClosed(NCCPConnection.ConnectionEvent e){}
// Flushes the request parked by sendRequest once the connection comes up, then
// deregisters as a listener.
public void connectionOpened(NCCPConnection.ConnectionEvent e)
{
ExpCoordinator.print("Reservation.connectionOpened", 2);
if (pendingRequest != null)
{
ExpCoordinator.print(" calling sendRequest", 2);
sendRequest(pendingRequest);
pendingRequest = null;
expCoordinator.removeConnectionListener(this);
}
}
//end NCCPConnection.ConnectionListener
// Next marker index for request/response matching.
// NOTE(review): static and unsynchronized -- not thread-safe; confirm all callers
// run on the same (event dispatch) thread.
protected static int getNextIndex() { return(++index);}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.messaging.event;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Thrift-generated struct (thrift compiler 0.9.2): do not hand-edit; regenerate
// from the .thrift IDL instead.
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-8-3")
public class TaskStatusChangeEvent implements org.apache.thrift.TBase<TaskStatusChangeEvent, TaskStatusChangeEvent._Fields>, java.io.Serializable, Cloneable, Comparable<TaskStatusChangeEvent> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TaskStatusChangeEvent");
private static final org.apache.thrift.protocol.TField STATE_FIELD_DESC = new org.apache.thrift.protocol.TField("state", org.apache.thrift.protocol.TType.I32, (short)1);
private static final org.apache.thrift.protocol.TField TASK_IDENTITY_FIELD_DESC = new org.apache.thrift.protocol.TField("taskIdentity", org.apache.thrift.protocol.TType.STRUCT, (short)2);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new TaskStatusChangeEventStandardSchemeFactory());
schemes.put(TupleScheme.class, new TaskStatusChangeEventTupleSchemeFactory());
}
private org.apache.airavata.model.status.TaskState state; // required
private TaskIdentifier taskIdentity; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
// Thrift-generated; do not hand-edit.
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
/**
*
* @see org.apache.airavata.model.status.TaskState
*/
STATE((short)1, "state"),
TASK_IDENTITY((short)2, "taskIdentity");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // STATE
return STATE;
case 2: // TASK_IDENTITY
return TASK_IDENTITY;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
// Thrift-generated field metadata (both fields REQUIRED); do not hand-edit.
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.STATE, new org.apache.thrift.meta_data.FieldMetaData("state", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, org.apache.airavata.model.status.TaskState.class)));
tmpMap.put(_Fields.TASK_IDENTITY, new org.apache.thrift.meta_data.FieldMetaData("taskIdentity", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TaskIdentifier.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(TaskStatusChangeEvent.class, metaDataMap);
}
// Thrift-generated constructors; do not hand-edit.
public TaskStatusChangeEvent() {
}
public TaskStatusChangeEvent(
org.apache.airavata.model.status.TaskState state,
TaskIdentifier taskIdentity)
{
this();
this.state = state;
this.taskIdentity = taskIdentity;
}
/**
* Performs a deep copy on <i>other</i>.
*/
// Thrift-generated; note 'state' (an enum) is shared, 'taskIdentity' is deep-copied.
public TaskStatusChangeEvent(TaskStatusChangeEvent other) {
if (other.isSetState()) {
this.state = other.state;
}
if (other.isSetTaskIdentity()) {
this.taskIdentity = new TaskIdentifier(other.taskIdentity);
}
}
public TaskStatusChangeEvent deepCopy() {
return new TaskStatusChangeEvent(this);
}
@Override
public void clear() {
this.state = null;
this.taskIdentity = null;
}
// Thrift-generated accessors; "set" means non-null for these reference-typed fields.
/**
*
* @see org.apache.airavata.model.status.TaskState
*/
public org.apache.airavata.model.status.TaskState getState() {
return this.state;
}
/**
*
* @see org.apache.airavata.model.status.TaskState
*/
public void setState(org.apache.airavata.model.status.TaskState state) {
this.state = state;
}
public void unsetState() {
this.state = null;
}
/** Returns true if field state is set (has been assigned a value) and false otherwise */
public boolean isSetState() {
return this.state != null;
}
public void setStateIsSet(boolean value) {
if (!value) {
this.state = null;
}
}
public TaskIdentifier getTaskIdentity() {
return this.taskIdentity;
}
public void setTaskIdentity(TaskIdentifier taskIdentity) {
this.taskIdentity = taskIdentity;
}
public void unsetTaskIdentity() {
this.taskIdentity = null;
}
/** Returns true if field taskIdentity is set (has been assigned a value) and false otherwise */
public boolean isSetTaskIdentity() {
return this.taskIdentity != null;
}
public void setTaskIdentityIsSet(boolean value) {
if (!value) {
this.taskIdentity = null;
}
}
// Thrift-generated generic field access by _Fields id; do not hand-edit.
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case STATE:
if (value == null) {
unsetState();
} else {
setState((org.apache.airavata.model.status.TaskState)value);
}
break;
case TASK_IDENTITY:
if (value == null) {
unsetTaskIdentity();
} else {
setTaskIdentity((TaskIdentifier)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case STATE:
return getState();
case TASK_IDENTITY:
return getTaskIdentity();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case STATE:
return isSetState();
case TASK_IDENTITY:
return isSetTaskIdentity();
}
throw new IllegalStateException();
}
// Thrift-generated equality: two events are equal when both fields match
// (null/unset compares equal to null/unset); do not hand-edit.
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof TaskStatusChangeEvent)
return this.equals((TaskStatusChangeEvent)that);
return false;
}
public boolean equals(TaskStatusChangeEvent that) {
if (that == null)
return false;
boolean this_present_state = true && this.isSetState();
boolean that_present_state = true && that.isSetState();
if (this_present_state || that_present_state) {
if (!(this_present_state && that_present_state))
return false;
if (!this.state.equals(that.state))
return false;
}
boolean this_present_taskIdentity = true && this.isSetTaskIdentity();
boolean that_present_taskIdentity = true && that.isSetTaskIdentity();
if (this_present_taskIdentity || that_present_taskIdentity) {
if (!(this_present_taskIdentity && that_present_taskIdentity))
return false;
if (!this.taskIdentity.equals(that.taskIdentity))
return false;
}
return true;
}
/**
 * Hash code consistent with {@link #equals(TaskStatusChangeEvent)}: folds the
 * presence flag and (when present) the value of each field into a list hash.
 * The enum's wire value is used for state so the hash is stable across JVMs.
 */
@Override
public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_state = true && (isSetState());
    list.add(present_state);
    if (present_state)
        list.add(state.getValue());
    boolean present_taskIdentity = true && (isSetTaskIdentity());
    list.add(present_taskIdentity);
    if (present_taskIdentity)
        list.add(taskIdentity);
    return list.hashCode();
}

/**
 * Orders events field by field: first by each field's "is set" flag, then by
 * its value. Different runtime classes fall back to class-name ordering.
 */
@Override
public int compareTo(TaskStatusChangeEvent other) {
    if (!getClass().equals(other.getClass())) {
        return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetState()).compareTo(other.isSetState());
    if (lastComparison != 0) {
        return lastComparison;
    }
    if (isSetState()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.state, other.state);
        if (lastComparison != 0) {
            return lastComparison;
        }
    }
    lastComparison = Boolean.valueOf(isSetTaskIdentity()).compareTo(other.isSetTaskIdentity());
    if (lastComparison != 0) {
        return lastComparison;
    }
    if (isSetTaskIdentity()) {
        lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.taskIdentity, other.taskIdentity);
        if (lastComparison != 0) {
            return lastComparison;
        }
    }
    return 0;
}

/** Looks up the {@code _Fields} constant for a Thrift field id. */
public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
}

/** Deserializes this struct from the protocol via the registered scheme (standard or tuple). */
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}

/** Serializes this struct to the protocol via the registered scheme (standard or tuple). */
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
/**
 * Renders {@code TaskStatusChangeEvent(state:..., taskIdentity:...)}.
 * Unset fields are printed as the literal text "null".
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder("TaskStatusChangeEvent(");
    sb.append("state:");
    sb.append(this.state == null ? "null" : this.state);
    sb.append(", ");
    sb.append("taskIdentity:");
    sb.append(this.taskIdentity == null ? "null" : this.taskIdentity);
    sb.append(")");
    return sb.toString();
}
/**
 * Checks that both required fields (state, taskIdentity) are set, and
 * recursively validates the nested taskIdentity struct.
 *
 * @throws org.apache.thrift.TException when a required field is unset
 */
public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetState()) {
        throw new org.apache.thrift.protocol.TProtocolException("Required field 'state' is unset! Struct:" + toString());
    }
    if (!isSetTaskIdentity()) {
        throw new org.apache.thrift.protocol.TProtocolException("Required field 'taskIdentity' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
    if (taskIdentity != null) {
        taskIdentity.validate();
    }
}

/** Java serialization hook: encodes this struct with the Thrift compact protocol. */
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
        write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
        // rewrap so the standard serialization contract (IOException) is kept
        throw new java.io.IOException(te);
    }
}

/** Java deserialization hook: decodes this struct with the Thrift compact protocol. */
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
        read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
        throw new java.io.IOException(te);
    }
}
/** Factory registered for the standard (field-tagged) serialization scheme. */
private static class TaskStatusChangeEventStandardSchemeFactory implements SchemeFactory {
    public TaskStatusChangeEventStandardScheme getScheme() {
        return new TaskStatusChangeEventStandardScheme();
    }
}

/**
 * Standard scheme: self-describing encoding where each field is preceded by
 * its id and type, so unknown fields can be skipped and order is flexible.
 */
private static class TaskStatusChangeEventStandardScheme extends StandardScheme<TaskStatusChangeEvent> {

    public void read(org.apache.thrift.protocol.TProtocol iprot, TaskStatusChangeEvent struct) throws org.apache.thrift.TException {
        org.apache.thrift.protocol.TField schemeField;
        iprot.readStructBegin();
        while (true)
        {
            schemeField = iprot.readFieldBegin();
            // STOP marker ends the field list
            if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
                break;
            }
            switch (schemeField.id) {
                case 1: // STATE
                    if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
                        // enum is transported as its i32 wire value
                        struct.state = org.apache.airavata.model.status.TaskState.findByValue(iprot.readI32());
                        struct.setStateIsSet(true);
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                    }
                    break;
                case 2: // TASK_IDENTITY
                    if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
                        struct.taskIdentity = new TaskIdentifier();
                        struct.taskIdentity.read(iprot);
                        struct.setTaskIdentityIsSet(true);
                    } else {
                        org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
                    }
                    break;
                default:
                    // unknown field id: skip payload for forward compatibility
                    org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            iprot.readFieldEnd();
        }
        iprot.readStructEnd();
        // required-field check happens after the full struct is consumed
        struct.validate();
    }

    public void write(org.apache.thrift.protocol.TProtocol oprot, TaskStatusChangeEvent struct) throws org.apache.thrift.TException {
        struct.validate();
        oprot.writeStructBegin(STRUCT_DESC);
        if (struct.state != null) {
            oprot.writeFieldBegin(STATE_FIELD_DESC);
            oprot.writeI32(struct.state.getValue());
            oprot.writeFieldEnd();
        }
        if (struct.taskIdentity != null) {
            oprot.writeFieldBegin(TASK_IDENTITY_FIELD_DESC);
            struct.taskIdentity.write(oprot);
            oprot.writeFieldEnd();
        }
        oprot.writeFieldStop();
        oprot.writeStructEnd();
    }
}
/** Factory registered for the compact tuple serialization scheme. */
private static class TaskStatusChangeEventTupleSchemeFactory implements SchemeFactory {
    public TaskStatusChangeEventTupleScheme getScheme() {
        return new TaskStatusChangeEventTupleScheme();
    }
}

/**
 * Tuple scheme: positional encoding with no field tags. Both fields are
 * required, so they are always written/read in declaration order.
 */
private static class TaskStatusChangeEventTupleScheme extends TupleScheme<TaskStatusChangeEvent> {

    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, TaskStatusChangeEvent struct) throws org.apache.thrift.TException {
        TTupleProtocol oprot = (TTupleProtocol) prot;
        oprot.writeI32(struct.state.getValue());
        struct.taskIdentity.write(oprot);
    }

    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, TaskStatusChangeEvent struct) throws org.apache.thrift.TException {
        TTupleProtocol iprot = (TTupleProtocol) prot;
        struct.state = org.apache.airavata.model.status.TaskState.findByValue(iprot.readI32());
        struct.setStateIsSet(true);
        struct.taskIdentity = new TaskIdentifier();
        struct.taskIdentity.read(iprot);
        struct.setTaskIdentityIsSet(true);
    }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hedwig.server.netty;
import java.util.HashMap;
import java.util.Map;
import java.beans.ConstructorProperties;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hedwig.protocol.PubSubProtocol.OperationType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Server Stats
*/
/**
 * Process-wide server statistics: per-operation latency/success/failure
 * counters plus a few global request/message counters.
 *
 * <p>Accessed as a singleton via {@link #getInstance()}. Per-operation updates
 * are guarded by synchronizing on the {@link OpStats} instance; the global
 * counters use {@link AtomicLong}.
 */
public class ServerStats {
    private static final Logger LOG = LoggerFactory.getLogger(ServerStats.class);
    // Eagerly-created singleton; package-visible for tests/JMX wiring.
    static ServerStats instance = new ServerStats();

    /**
     * A read view of stats, also used in CompositeViewData to expose to JMX
     */
    public static class OpStatData {
        private final long maxLatency, minLatency;
        private final double avgLatency;
        private final long numSuccessOps, numFailedOps;
        private final String latencyHist;

        @ConstructorProperties({"maxLatency", "minLatency", "avgLatency",
                                "numSuccessOps", "numFailedOps", "latencyHist"})
        public OpStatData(long maxLatency, long minLatency, double avgLatency,
                          long numSuccessOps, long numFailedOps, String latencyHist) {
            this.maxLatency = maxLatency;
            // Long.MAX_VALUE is the "no sample yet" sentinel; report it as 0
            this.minLatency = minLatency == Long.MAX_VALUE ? 0 : minLatency;
            this.avgLatency = avgLatency;
            this.numSuccessOps = numSuccessOps;
            this.numFailedOps = numFailedOps;
            this.latencyHist = latencyHist;
        }

        public long getMaxLatency() {
            return maxLatency;
        }

        public long getMinLatency() {
            return minLatency;
        }

        public double getAvgLatency() {
            return avgLatency;
        }

        public long getNumSuccessOps() {
            return numSuccessOps;
        }

        public long getNumFailedOps() {
            return numFailedOps;
        }

        public String getLatencyHist() {
            return latencyHist;
        }
    }

    /**
     * Operation Statistics
     */
    public static class OpStats {
        // 9 buckets each for the <100ms / 100ms-1s / 1s-10s decades, plus
        // one catch-all for >10s and one extra slot.
        static final int NUM_BUCKETS = 3*9 + 2;

        long maxLatency = 0;
        long minLatency = Long.MAX_VALUE; // sentinel until first sample
        double totalLatency = 0.0f;
        long numSuccessOps = 0;
        long numFailedOps = 0;
        long[] latencyBuckets = new long[NUM_BUCKETS];

        OpStats() {}

        /**
         * Increment number of failed operations
         */
        synchronized public void incrementFailedOps() {
            ++numFailedOps;
        }

        /**
         * Update Latency
         */
        synchronized public void updateLatency(long latency) {
            if (latency < 0) {
                // less than 0ms . Ideally this should not happen.
                // We have seen this latency negative in some cases due to the
                // behaviors of JVM. Ignoring the statistics updation for such
                // cases.
                LOG.warn("Latency time coming negative");
                return;
            }
            totalLatency += latency;
            ++numSuccessOps;
            if (latency < minLatency) {
                minLatency = latency;
            }
            if (latency > maxLatency) {
                maxLatency = latency;
            }
            // Pick a histogram bucket by latency decade.
            // NOTE(review): buckets overlap at the exact boundaries (e.g.
            // latency == 100 lands in bucket 10, shared with the 100-199ms
            // range) — appears tolerable for a coarse histogram; confirm
            // before tightening the boundaries.
            int bucket;
            if (latency <= 100) { // less than 100ms
                bucket = (int)(latency / 10);
            } else if (latency <= 1000) { // 100ms ~ 1000ms
                bucket = 1 * 9 + (int)(latency / 100);
            } else if (latency <= 10000) { // 1s ~ 10s
                bucket = 2 * 9 + (int)(latency / 1000);
            } else { // more than 10s
                bucket = 3 * 9 + 1;
            }
            ++latencyBuckets[bucket];
        }

        /** Snapshots the mutable counters into an immutable, JMX-friendly view. */
        synchronized public OpStatData toOpStatData() {
            double avgLatency = numSuccessOps > 0 ? totalLatency / numSuccessOps : 0.0f;
            // histogram serialized as a comma-separated bucket list
            StringBuilder sb = new StringBuilder();
            for (int i=0; i<NUM_BUCKETS; i++) {
                sb.append(latencyBuckets[i]);
                if (i != NUM_BUCKETS - 1) {
                    sb.append(',');
                }
            }
            return new OpStatData(maxLatency, minLatency, avgLatency,
                                  numSuccessOps, numFailedOps, sb.toString());
        }
    }

    /** Returns the process-wide singleton. */
    public static ServerStats getInstance() {
        return instance;
    }

    // protected (not private) so subclasses/tests can instantiate their own.
    protected ServerStats() {
        stats = new HashMap<OperationType, OpStats>();
        // pre-populate so getOpStats never returns null for a known type
        for (OperationType type : OperationType.values()) {
            stats.put(type, new OpStats());
        }
    }

    Map<OperationType, OpStats> stats;
    AtomicLong numRequestsReceived = new AtomicLong(0);
    AtomicLong numRequestsRedirect = new AtomicLong(0);
    AtomicLong numMessagesDelivered = new AtomicLong(0);

    /**
     * Stats of operations
     *
     * @param type
     *          Operation Type
     * @return op stats
     */
    public OpStats getOpStats(OperationType type) {
        return stats.get(type);
    }

    public void incrementRequestsReceived() {
        numRequestsReceived.incrementAndGet();
    }

    public void incrementRequestsRedirect() {
        numRequestsRedirect.incrementAndGet();
    }

    public void incrementMessagesDelivered() {
        numMessagesDelivered.incrementAndGet();
    }

    public long getNumRequestsReceived() {
        return numRequestsReceived.get();
    }

    public long getNumRequestsRedirect() {
        return numRequestsRedirect.get();
    }

    public long getNumMessagesDelivered() {
        return numMessagesDelivered.get();
    }
}
| |
/*
* Copyright 2015, The Querydsl Team (http://www.querydsl.com/team)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.querydsl.core.types;
import java.util.*;
import javax.annotation.Nullable;
import com.google.common.collect.ImmutableList;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.QueryException;
/**
* {@code ExpressionUtils} provides utilities for constructing common operation instances. This class is
* used internally in Querydsl and is not suitable to be used in cases where DSL methods are needed,
* since the Expression implementations used in this class are minimal internal implementations.
*
* @author tiwe
*
*/
public final class ExpressionUtils {
/**
 * Templates variant that renders path segments joined by underscores, so a
 * path can be turned into a legal identifier (see createRootVariable).
 */
private static final class UnderscoreTemplates extends Templates {
    private UnderscoreTemplates() {
        add(PathType.PROPERTY, "{0}_{1}");
        add(PathType.COLLECTION_ANY, "{0}");
        add(PathType.LISTVALUE, "{0}_{1}");
        add(PathType.LISTVALUE_CONSTANT, "{0}_{1}");
    }
}

// Shared, immutable serializer used when rendering paths as variable names.
private static final Templates TEMPLATES = new UnderscoreTemplates();
/**
 * Create a new Operation expression
 *
 * @param type type of expression
 * @param operator operator
 * @param args operation arguments
 * @return operation expression
 */
public static <T> Operation<T> operation(Class<? extends T> type, Operator operator,
                                         Expression<?>... args) {
    return operation(type, operator, ImmutableList.copyOf(args));
}

/**
 * Create a new Operation expression
 *
 * <p>Boolean-typed operations are created as {@link PredicateOperation} so the
 * result also implements {@code Predicate}.
 *
 * @param type type of expression
 * @param operator operator
 * @param args operation arguments
 * @return operation expression
 */
@SuppressWarnings("unchecked")
public static <T> Operation<T> operation(Class<? extends T> type, Operator operator,
                                         ImmutableList<Expression<?>> args) {
    if (type.equals(Boolean.class)) {
        return (Operation<T>) new PredicateOperation(operator, args);
    } else {
        return new OperationImpl<T>(type, operator, args);
    }
}

/**
 * Create a new boolean Operation expression
 *
 * @param operator operator
 * @param args operation arguments
 * @return operation expression
 */
public static PredicateOperation predicate(Operator operator, Expression<?>... args) {
    return predicate(operator, ImmutableList.copyOf(args));
}

/**
 * Create a new boolean Operation expression
 *
 * @param operator operator
 * @param args operation arguments
 * @return operation expression
 */
public static PredicateOperation predicate(Operator operator, ImmutableList<Expression<?>> args) {
    return new PredicateOperation(operator, args);
}

/**
 * Create a new root Path expression
 *
 * @param type type of expression
 * @param variable variable name
 * @return path expression
 */
public static <T> Path<T> path(Class<? extends T> type, String variable) {
    return new PathImpl<T>(type, variable);
}

/**
 * Create a new property Path expression
 *
 * @param type type of expression
 * @param parent parent path
 * @param property property name
 * @return property path
 */
public static <T> Path<T> path(Class<? extends T> type, Path<?> parent, String property) {
    return new PathImpl<T>(type, parent, property);
}

/**
 * Create a new Path expression from explicit metadata
 *
 * @param type type of expression
 * @param metadata path metadata
 * @param <T> type of expression
 * @return path expression
 */
public static <T> Path<T> path(Class<? extends T> type, PathMetadata metadata) {
    return new PathImpl<T>(type, metadata);
}
/**
 * Create a new boolean Template expression (template is parsed)
 *
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static PredicateTemplate predicateTemplate(String template, Object... args) {
    return predicateTemplate(TemplateFactory.DEFAULT.create(template), ImmutableList.copyOf(args));
}

/**
 * Create a new boolean Template expression (template is parsed)
 *
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static PredicateTemplate predicateTemplate(String template, ImmutableList<?> args) {
    return predicateTemplate(TemplateFactory.DEFAULT.create(template), args);
}

/**
 * Create a new boolean Template expression
 *
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static PredicateTemplate predicateTemplate(Template template, Object... args) {
    return predicateTemplate(template, ImmutableList.copyOf(args));
}

/**
 * Create a new boolean Template expression
 *
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static PredicateTemplate predicateTemplate(Template template, ImmutableList<?> args) {
    return new PredicateTemplate(template, args);
}

/**
 * Create a new Template expression (template is parsed)
 *
 * @param cl type of expression
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static <T> TemplateExpression<T> template(Class<? extends T> cl, String template, Object... args) {
    return template(cl, TemplateFactory.DEFAULT.create(template), ImmutableList.copyOf(args));
}

/**
 * Create a new Template expression (template is parsed)
 *
 * @param cl type of expression
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static <T> TemplateExpression<T> template(Class<? extends T> cl, String template, ImmutableList<?> args) {
    return template(cl, TemplateFactory.DEFAULT.create(template), args);
}

/**
 * Create a new Template expression
 *
 * @param cl type of expression
 * @param template template
 * @param args template parameters
 * @return template expression
 */
public static <T> TemplateExpression<T> template(Class<? extends T> cl, Template template, Object... args) {
    return template(cl, template, ImmutableList.copyOf(args));
}

/**
 * Create a new Template expression
 *
 * <p>Boolean-typed templates are created as {@link PredicateTemplate} so the
 * result also implements {@code Predicate}.
 *
 * @param cl type of expression
 * @param template template
 * @param args template parameters
 * @return template expression
 */
@SuppressWarnings("unchecked")
public static <T> TemplateExpression<T> template(Class<? extends T> cl, Template template, ImmutableList<?> args) {
    if (cl.equals(Boolean.class)) {
        return (TemplateExpression<T>) new PredicateTemplate(template, args);
    } else {
        return new TemplateExpressionImpl<T>(cl, template, args);
    }
}
/**
 * Create a {@code all col} expression
 *
 * @param col collection expression
 * @return all col
 */
@SuppressWarnings("unchecked")
public static <T> Expression<T> all(CollectionExpression<?, ? super T> col) {
    // the element type of the collection becomes the expression type
    return new OperationImpl<T>((Class<T>) col.getParameter(0), Ops.QuantOps.ALL,
                                ImmutableList.<Expression<?>>of(col));
}

/**
 * Create a {@code any col} expression
 *
 * @param col collection expression
 * @return any col
 */
@SuppressWarnings("unchecked")
public static <T> Expression<T> any(CollectionExpression<?, ? super T> col) {
    return new OperationImpl<T>((Class<T>) col.getParameter(0), Ops.QuantOps.ANY,
                                ImmutableList.<Expression<?>>of(col));
}

/**
 * Create a {@code all col} expression
 *
 * @param col subquery expression
 * @return all col
 */
@SuppressWarnings("unchecked")
public static <T> Expression<T> all(SubQueryExpression<? extends T> col) {
    return new OperationImpl<T>(col.getType(), Ops.QuantOps.ALL, ImmutableList.<Expression<?>>of(col));
}

/**
 * Create a {@code any col} expression
 *
 * @param col subquery expression
 * @return any col
 */
@SuppressWarnings("unchecked")
public static <T> Expression<T> any(SubQueryExpression<? extends T> col) {
    return new OperationImpl<T>(col.getType(), Ops.QuantOps.ANY, ImmutableList.<Expression<?>>of(col));
}
/**
 * Create the intersection of the given arguments; null entries are skipped.
 *
 * @param exprs predicates
 * @return intersection, or null when no non-null predicate was supplied
 */
@Nullable
public static Predicate allOf(Collection<Predicate> exprs) {
    Predicate result = null;
    for (Predicate p : exprs) {
        if (p == null) {
            continue;
        }
        result = (result == null) ? p : ExpressionUtils.and(result, p);
    }
    return result;
}

/**
 * Create the intersection of the given arguments; null entries are skipped.
 *
 * @param exprs predicates
 * @return intersection, or null when no non-null predicate was supplied
 */
@Nullable
public static Predicate allOf(Predicate... exprs) {
    return allOf(Arrays.asList(exprs));
}
/**
 * Create the intersection of the given arguments; either side may be null
 * (or unwrap to null), in which case the other side is returned as-is.
 *
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left and right
 */
public static Predicate and(Predicate left, Predicate right) {
    Predicate lhs = (Predicate) extract(left);
    Predicate rhs = (Predicate) extract(right);
    if (lhs == null) {
        return rhs;
    }
    if (rhs == null) {
        return lhs;
    }
    return predicate(Ops.AND, lhs, rhs);
}
/**
 * Create the union of the given arguments; null entries are skipped.
 *
 * @param exprs predicate
 * @return union, or null when no non-null predicate was supplied
 */
@Nullable
public static Predicate anyOf(Collection<Predicate> exprs) {
    Predicate result = null;
    for (Predicate p : exprs) {
        if (p == null) {
            continue;
        }
        result = (result == null) ? p : ExpressionUtils.or(result, p);
    }
    return result;
}

/**
 * Create the union of the given arguments; null entries are skipped.
 *
 * @param exprs predicates
 * @return union, or null when no non-null predicate was supplied
 */
@Nullable
public static Predicate anyOf(Predicate... exprs) {
    return anyOf(Arrays.asList(exprs));
}
/**
 * Create an alias expression with the given source and alias
 *
 * @param <D> type of expression
 * @param source source
 * @param alias alias
 * @return source as alias
 */
public static <D> Expression<D> as(Expression<D> source, Path<D> alias) {
    return operation(alias.getType(), Ops.ALIAS, source, alias);
}

/**
 * Create an alias expression with the given source and alias
 * (the alias string is wrapped in a root path of the source's type)
 *
 * @param <D> type of expression
 * @param source source
 * @param alias alias
 * @return source as alias
 */
public static <D> Expression<D> as(Expression<D> source, String alias) {
    return as(source, path(source.getType(), alias));
}

/**
 * Create a {@code count(source)} expression
 *
 * @param source source
 * @return count(source)
 */
public static Expression<Long> count(Expression<?> source) {
    return operation(Long.class, Ops.AggOps.COUNT_AGG, source);
}
/**
 * Create a {@code left == constant} expression (the constant is wrapped)
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param constant rhs of expression
 * @return left == constant
 */
public static <D> Predicate eqConst(Expression<D> left, D constant) {
    return eq(left, ConstantImpl.create(constant));
}

/**
 * Create a {@code left == right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left == right
 */
public static <D> Predicate eq(Expression<D> left, Expression<? extends D> right) {
    return predicate(Ops.EQ, left, right);
}

/**
 * Create a {@code left in right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left in right
 */
public static <D> Predicate in(Expression<D> left, CollectionExpression<?,? extends D> right) {
    return predicate(Ops.IN, left, right);
}

/**
 * Create a {@code left in right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left in right
 */
public static <D> Predicate in(Expression<D> left, SubQueryExpression<? extends D> right) {
    return predicate(Ops.IN, left, right);
}

/**
 * Create a {@code left in right} expression. A single-element collection is
 * simplified to an equality comparison.
 *
 * @param <D> element type
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left in right
 */
public static <D> Predicate in(Expression<D> left, Collection<? extends D> right) {
    if (right.size() == 1) {
        return eqConst(left, right.iterator().next());
    } else {
        return predicate(Ops.IN, left, ConstantImpl.create(right));
    }
}

/**
 * Create a {@code left in right or...} expression for each list
 *
 * @param <D> element type
 * @param left lhs of the in-expressions
 * @param lists collections to be OR-ed together
 * @return a {@code left in right or...} expression
 */
public static <D> Predicate inAny(Expression<D> left, Iterable<? extends Collection<? extends D>> lists) {
    BooleanBuilder rv = new BooleanBuilder();
    for (Collection<? extends D> list : lists) {
        rv.or(in(left, list));
    }
    return rv;
}

/**
 * Create a {@code left is null} expression
 *
 * @param left operation argument
 * @return left is null
 */
public static Predicate isNull(Expression<?> left) {
    return predicate(Ops.IS_NULL, left);
}

/**
 * Create a {@code left is not null} expression
 *
 * @param left operation argument
 * @return left is not null
 */
public static Predicate isNotNull(Expression<?> left) {
    return predicate(Ops.IS_NOT_NULL, left);
}
/**
 * Convert the given like pattern to a regex pattern
 *
 * @param expr expression to convert
 * @return converted expression
 */
public static Expression<String> likeToRegex(Expression<String> expr) {
    return likeToRegex(expr, true);
}

/**
 * Convert the given like pattern to a regex pattern.
 *
 * <p>Constants are rewritten character by character ({@code %} → {@code .*},
 * {@code _} → {@code .}, regex metacharacters escaped); CONCAT operations are
 * converted recursively with anchoring disabled on the parts. Anything else
 * is returned unchanged.
 *
 * @param expr expression to be converted
 * @param matchStartAndEnd if start and end should be matched as well
 * @return converted expression
 */
@SuppressWarnings("unchecked")
public static Expression<String> likeToRegex(Expression<String> expr, boolean matchStartAndEnd) {
    // TODO : this should take the escape character into account
    if (expr instanceof Constant<?>) {
        final String like = expr.toString();
        final StringBuilder rv = new StringBuilder(like.length() + 4);
        // anchor the start unless the pattern begins with a wildcard
        if (matchStartAndEnd && !like.startsWith("%")) {
            rv.append('^');
        }
        for (int i = 0; i < like.length(); i++) {
            char ch = like.charAt(i);
            if (ch == '.' || ch == '*' || ch == '?') {
                // escape regex metacharacters, then fall through to append ch
                rv.append('\\');
            } else if (ch == '%') {
                rv.append(".*");
                continue;
            } else if (ch == '_') {
                rv.append('.');
                continue;
            }
            rv.append(ch);
        }
        // anchor the end unless the pattern ends with a wildcard
        if (matchStartAndEnd && !like.endsWith("%")) {
            rv.append('$');
        }
        // only allocate a new constant when something actually changed
        if (!like.equals(rv.toString())) {
            return ConstantImpl.create(rv.toString());
        }
    } else if (expr instanceof Operation<?>) {
        Operation<?> o = (Operation<?>) expr;
        if (o.getOperator() == Ops.CONCAT) {
            Expression<String> lhs = likeToRegex((Expression<String>) o.getArg(0), false);
            Expression<String> rhs = likeToRegex((Expression<String>) o.getArg(1), false);
            if (lhs != o.getArg(0) || rhs != o.getArg(1)) {
                return operation(String.class, Ops.CONCAT, lhs, rhs);
            }
        }
    }
    return expr;
}
/**
 * Create a list expression for the given arguments
 *
 * @param exprs list elements
 * @return list expression
 */
public static <T> Expression<T> list(Class<T> clazz, Expression<?>... exprs) {
    return list(clazz, ImmutableList.copyOf(exprs));
}

/**
 * Create a list expression for the given arguments: a single element becomes
 * a SINGLETON operation, multiple elements are folded left into nested LIST
 * operations.
 *
 * <p>NOTE(review): assumes {@code exprs} is non-empty — an empty list would
 * throw IndexOutOfBoundsException at {@code exprs.get(0)}; confirm callers
 * guarantee this. The SINGLETON branch passes the element as both operands —
 * presumably intentional, verify against Ops.SINGLETON's arity.
 *
 * @param exprs list elements
 * @return list expression
 */
@SuppressWarnings("unchecked")
public static <T> Expression<T> list(Class<T> clazz, List<? extends Expression<?>> exprs) {
    Expression<T> rv = (Expression<T>) exprs.get(0);
    if (exprs.size() == 1) {
        rv = operation(clazz, Ops.SINGLETON, rv, exprs.get(0));
    } else {
        for (int i = 1; i < exprs.size(); i++) {
            rv = operation(clazz, Ops.LIST, rv, exprs.get(i));
        }
    }
    return rv;
}
/**
 * Convert the given expression from regex form to like.
 *
 * <p>Constants are rewritten ({@code .*} → {@code %}, {@code .} → {@code _},
 * escaped literals unescaped); patterns using unsupported constructs fail
 * with {@link QueryException}. CONCAT operations are converted recursively;
 * anything else is returned unchanged.
 *
 * @param expr expression to convert
 * @return converted expression
 * @throws QueryException when the regex cannot be expressed as a like pattern
 */
@SuppressWarnings("unchecked")
public static Expression<String> regexToLike(Expression<String> expr) {
    if (expr instanceof Constant<?>) {
        final String str = expr.toString();
        final StringBuilder rv = new StringBuilder(str.length() + 2);
        // true when the previous character was an unconsumed backslash
        boolean escape = false;
        for (int i = 0; i < str.length(); i++) {
            final char ch = str.charAt(i);
            if (!escape && ch == '.') {
                if (i < str.length() - 1 && str.charAt(i + 1) == '*') {
                    // ".*" => "%" (consume the '*' too)
                    rv.append('%');
                    i++;
                } else {
                    rv.append('_');
                }
                continue;
            } else if (!escape && ch == '\\') {
                escape = true;
                continue;
            } else if (!escape && (ch == '[' || ch == ']' || ch == '^' || ch == '.' || ch == '*')) {
                // character classes / anchors / bare quantifiers have no like equivalent
                throw new QueryException("'" + str + "' can't be converted to like form");
            } else if (escape && (ch == 'd' || ch == 'D' || ch == 's' || ch == 'S' || ch == 'w' || ch == 'W')) {
                // regex character-class shorthands have no like equivalent
                throw new QueryException("'" + str + "' can't be converted to like form");
            }
            rv.append(ch);
            escape = false;
        }
        // only allocate a new constant when something actually changed
        if (!rv.toString().equals(str)) {
            return ConstantImpl.create(rv.toString());
        }
    } else if (expr instanceof Operation<?>) {
        Operation<?> o = (Operation<?>) expr;
        if (o.getOperator() == Ops.CONCAT) {
            Expression<String> lhs = regexToLike((Expression<String>) o.getArg(0));
            Expression<String> rhs = regexToLike((Expression<String>) o.getArg(1));
            if (lhs != o.getArg(0) || rhs != o.getArg(1)) {
                return operation(String.class, Ops.CONCAT, lhs, rhs);
            }
        }
    }
    return expr;
}
/**
 * Create a {@code left != constant} expression (the constant is wrapped)
 *
 * @param <D> type of expression
 * @param left lhs of expression
 * @param constant rhs of expression
 * @return left != constant
 */
public static <D> Predicate neConst(Expression<D> left, D constant) {
    return ne(left, ConstantImpl.create(constant));
}

/**
 * Create a {@code left != right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left != right
 */
public static <D> Predicate ne(Expression<D> left, Expression<? super D> right) {
    return predicate(Ops.NE, left, right);
}

/**
 * Create a {@code left not in right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left not in right
 */
public static <D> Predicate notIn(Expression<D> left, CollectionExpression<?,? extends D> right) {
    return predicate(Ops.NOT_IN, left, right);
}

/**
 * Create a {@code left not in right} expression
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left not in right
 */
public static <D> Predicate notIn(Expression<D> left, SubQueryExpression<? extends D> right) {
    return predicate(Ops.NOT_IN, left, right);
}

/**
 * Create a {@code left not in right} expression. A single-element collection
 * is simplified to an inequality comparison.
 *
 * @param <D> type of expressions
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left not in right
 */
public static <D> Predicate notIn(Expression<D> left, Collection<? extends D> right) {
    if (right.size() == 1) {
        return neConst(left, right.iterator().next());
    } else {
        return predicate(Ops.NOT_IN, left, ConstantImpl.create(right));
    }
}

/**
 * Create a {@code left not in right and...} expression for each list
 *
 * @param <D> element type
 * @param left lhs of the not-in expressions
 * @param lists collections to be AND-ed together
 * @return a {@code left not in right and...} expression
 */
public static <D> Predicate notInAny(Expression<D> left, Iterable<? extends Collection<? extends D>> lists) {
    BooleanBuilder rv = new BooleanBuilder();
    for (Collection<? extends D> list : lists) {
        rv.and(notIn(left, list));
    }
    return rv;
}
/**
 * Create the union of the given arguments; either side may be null (or
 * unwrap to null), in which case the other side is returned as-is.
 *
 * @param left lhs of expression
 * @param right rhs of expression
 * @return left or right
 */
public static Predicate or(Predicate left, Predicate right) {
    Predicate lhs = (Predicate) extract(left);
    Predicate rhs = (Predicate) extract(right);
    if (lhs == null) {
        return rhs;
    }
    if (rhs == null) {
        return lhs;
    }
    return predicate(Ops.OR, lhs, rhs);
}
/**
 * Create a distinct list of the given args, keeping first-occurrence order.
 *
 * @param args elements
 * @return list with distinct elements
 */
public static ImmutableList<Expression<?>> distinctList(Expression<?>... args) {
    // LinkedHashSet dedupes while preserving insertion (first-seen) order
    final Set<Expression<?>> seen = new LinkedHashSet<Expression<?>>(Arrays.asList(args));
    return ImmutableList.copyOf(seen);
}

/**
 * Create a distinct list of the concatenated array contents, keeping
 * first-occurrence order across all arrays.
 *
 * @param args elements
 * @return list with distinct elements
 */
public static ImmutableList<Expression<?>> distinctList(Expression<?>[]... args) {
    final Set<Expression<?>> seen = new LinkedHashSet<Expression<?>>();
    for (Expression<?>[] group : args) {
        Collections.addAll(seen, group);
    }
    return ImmutableList.copyOf(seen);
}
/**
* Get the potentially wrapped expression
*
* @param expr expression to analyze
* @return inner expression
*/
@SuppressWarnings("unchecked")
public static <T> Expression<T> extract(Expression<T> expr) {
if (expr != null) {
final Class<?> clazz = expr.getClass();
if (clazz == PathImpl.class || clazz == PredicateOperation.class || clazz == ConstantImpl.class) {
return expr;
} else {
return (Expression<T>) expr.accept(ExtractorVisitor.DEFAULT, null);
}
} else {
return null;
}
}
/**
* Create a new root variable based on the given path and suffix
*
* @param path base path
* @param suffix suffix for variable name
* @return path expression
*/
public static String createRootVariable(Path<?> path, int suffix) {
String variable = path.accept(ToStringVisitor.DEFAULT, TEMPLATES);
return variable + "_" + suffix;
}
/**
* Create a new root variable based on the given path
*
* @param path base path
* @return variable name
*/
public static String createRootVariable(Path<?> path) {
return path.accept(ToStringVisitor.DEFAULT, TEMPLATES);
}
/**
* Converts the given object to an Expression
*
* <p>Casts expressions and wraps everything else into co</p>
*
* @param o object to convert
* @return converted argument
*/
public static Expression<?> toExpression(Object o) {
if (o instanceof Expression) {
return (Expression<?>) o;
} else {
return ConstantImpl.create(o);
}
}
/**
* Converts the given expression to lower(expression)
*
* <p>Constants are lower()ed at creation time</p>
*
* @param stringExpression the string to lower()
* @return lower(stringExpression)
*/
public static Expression<String> toLower(Expression<String> stringExpression) {
if (stringExpression instanceof Constant) {
Constant<String> constantExpression = (Constant<String>) stringExpression;
return ConstantImpl.create(constantExpression.getConstant().toLowerCase(Locale.ENGLISH));
} else {
return operation(String.class, Ops.LOWER, stringExpression);
}
}
/**
* Create an expression out of the given order specifiers
*
* @param args order
* @return expression for order
*/
public static Expression<?> orderBy(List<OrderSpecifier<?>> args) {
return operation(Object.class, Ops.ORDER, ConstantImpl.create(args));
}
    // Utility class; not meant to be instantiated.
    private ExpressionUtils() { }
}
| |
package org.apereo.cas.web.config;
import org.apereo.cas.CentralAuthenticationService;
import org.apereo.cas.authentication.AuthenticationServiceSelectionPlan;
import org.apereo.cas.authentication.AuthenticationSystemSupport;
import org.apereo.cas.authentication.adaptive.AdaptiveAuthenticationPolicy;
import org.apereo.cas.authentication.principal.ServiceFactory;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.logout.LogoutManager;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.ticket.registry.TicketRegistrySupport;
import org.apereo.cas.web.FlowExecutionExceptionResolver;
import org.apereo.cas.web.flow.FrontChannelLogoutAction;
import org.apereo.cas.web.flow.GatewayServicesManagementCheck;
import org.apereo.cas.web.flow.GenerateServiceTicketAction;
import org.apereo.cas.web.flow.GenericSuccessViewAction;
import org.apereo.cas.web.flow.InitialAuthenticationAction;
import org.apereo.cas.web.flow.InitialAuthenticationRequestValidationAction;
import org.apereo.cas.web.flow.InitialFlowSetupAction;
import org.apereo.cas.web.flow.InitializeLoginAction;
import org.apereo.cas.web.flow.LogoutAction;
import org.apereo.cas.web.flow.SendTicketGrantingTicketAction;
import org.apereo.cas.web.flow.ServiceAuthorizationCheck;
import org.apereo.cas.web.flow.ServiceWarningAction;
import org.apereo.cas.web.flow.TerminateSessionAction;
import org.apereo.cas.web.flow.TicketGrantingTicketCheckAction;
import org.apereo.cas.web.flow.resolver.CasDelegatingWebflowEventResolver;
import org.apereo.cas.web.flow.resolver.CasWebflowEventResolver;
import org.apereo.cas.web.support.ArgumentExtractor;
import org.apereo.cas.web.support.CookieRetrievingCookieGenerator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.webflow.execution.Action;
import java.util.Collections;
/**
* This is {@link CasSupportActionsConfiguration}.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Configuration("casSupportActionsConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
@EnableTransactionManagement(proxyTargetClass = true)
public class CasSupportActionsConfiguration {
    // Collaborators injected from the CAS application context; each qualifier
    // names the bean id expected to be defined elsewhere in the deployment.
    @Autowired
    @Qualifier("serviceTicketRequestWebflowEventResolver")
    private CasWebflowEventResolver serviceTicketRequestWebflowEventResolver;
    @Autowired
    @Qualifier("initialAuthenticationAttemptWebflowEventResolver")
    private CasDelegatingWebflowEventResolver initialAuthenticationAttemptWebflowEventResolver;
    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;
    @Autowired
    @Qualifier("ticketGrantingTicketCookieGenerator")
    private CookieRetrievingCookieGenerator ticketGrantingTicketCookieGenerator;
    @Autowired
    @Qualifier("warnCookieGenerator")
    private CookieRetrievingCookieGenerator warnCookieGenerator;
    @Autowired
    private CasConfigurationProperties casProperties;
    @Autowired
    @Qualifier("webApplicationServiceFactory")
    private ServiceFactory webApplicationServiceFactory;
    @Autowired
    @Qualifier("adaptiveAuthenticationPolicy")
    private AdaptiveAuthenticationPolicy adaptiveAuthenticationPolicy;
    @Autowired
    @Qualifier("centralAuthenticationService")
    private CentralAuthenticationService centralAuthenticationService;
    @Autowired
    @Qualifier("defaultAuthenticationSystemSupport")
    private AuthenticationSystemSupport authenticationSystemSupport;
    @Autowired
    @Qualifier("logoutManager")
    private LogoutManager logoutManager;
    @Autowired
    @Qualifier("defaultTicketRegistrySupport")
    private TicketRegistrySupport ticketRegistrySupport;
    @Autowired
    @Qualifier("rankedAuthenticationProviderWebflowEventResolver")
    private CasWebflowEventResolver rankedAuthenticationProviderWebflowEventResolver;
    @Autowired
    @Qualifier("authenticationServiceSelectionPlan")
    private AuthenticationServiceSelectionPlan authenticationRequestServiceSelectionStrategies;
    /** Exception resolver that maps webflow execution failures to a retry of the flow. */
    @Bean
    public HandlerExceptionResolver errorHandlerResolver() {
        return new FlowExecutionExceptionResolver();
    }
    /** Action driving credential-based authentication attempts from the login form. */
    @ConditionalOnMissingBean(name = "authenticationViaFormAction")
    @Bean
    public Action authenticationViaFormAction() {
        return new InitialAuthenticationAction(initialAuthenticationAttemptWebflowEventResolver,
                serviceTicketRequestWebflowEventResolver,
                adaptiveAuthenticationPolicy);
    }
    /** Action verifying the requesting service is authorized by the services registry. */
    @ConditionalOnMissingBean(name = "serviceAuthorizationCheck")
    @Bean
    public Action serviceAuthorizationCheck() {
        return new ServiceAuthorizationCheck(this.servicesManager, authenticationRequestServiceSelectionStrategies);
    }
    /** Action that issues/propagates the TGT cookie after authentication. */
    @ConditionalOnMissingBean(name = "sendTicketGrantingTicketAction")
    @Bean
    public Action sendTicketGrantingTicketAction() {
        return new SendTicketGrantingTicketAction(centralAuthenticationService, servicesManager, ticketGrantingTicketCookieGenerator,
                casProperties.getSso().isRenewedAuthn());
    }
    /** Action executing the logout flow, configured from the logout properties. */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "logoutAction")
    public Action logoutAction() {
        return new LogoutAction(webApplicationServiceFactory, servicesManager, casProperties.getLogout());
    }
    /** Action preparing the login view state. */
    @ConditionalOnMissingBean(name = "initializeLoginAction")
    @Bean
    @RefreshScope
    public Action initializeLoginAction() {
        return new InitializeLoginAction(servicesManager);
    }
    /** Entry action for the login flow; extracts the service from the request. */
    @RefreshScope
    @Bean
    @Autowired
    @ConditionalOnMissingBean(name = "initialFlowSetupAction")
    public Action initialFlowSetupAction(@Qualifier("argumentExtractor") final ArgumentExtractor argumentExtractor) {
        return new InitialFlowSetupAction(Collections.singletonList(argumentExtractor),
                servicesManager,
                ticketGrantingTicketCookieGenerator,
                warnCookieGenerator, casProperties);
    }
    /** Action validating the initial authentication request via the ranked resolver. */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "initialAuthenticationRequestValidationAction")
    public Action initialAuthenticationRequestValidationAction() {
        return new InitialAuthenticationRequestValidationAction(rankedAuthenticationProviderWebflowEventResolver);
    }
    /** Action rendering the generic login-success view (or redirecting to the default URL). */
    @RefreshScope
    @Bean
    @ConditionalOnMissingBean(name = "genericSuccessViewAction")
    public Action genericSuccessViewAction() {
        return new GenericSuccessViewAction(centralAuthenticationService, servicesManager, webApplicationServiceFactory,
                casProperties.getView().getDefaultRedirectUrl());
    }
    /** Action that generates a service ticket for the authenticated session. */
    @Bean
    @ConditionalOnMissingBean(name = "generateServiceTicketAction")
    public Action generateServiceTicketAction() {
        return new GenerateServiceTicketAction(authenticationSystemSupport, centralAuthenticationService, ticketRegistrySupport, servicesManager);
    }
    /** Registry check executed for gateway requests. */
    @Bean
    @ConditionalOnMissingBean(name = "gatewayServicesManagementCheck")
    public Action gatewayServicesManagementCheck() {
        return new GatewayServicesManagementCheck(this.servicesManager);
    }
    /** Action performing front-channel (browser-driven) single logout. */
    @Bean
    @ConditionalOnMissingBean(name = "frontChannelLogoutAction")
    public Action frontChannelLogoutAction() {
        return new FrontChannelLogoutAction(this.logoutManager);
    }
    /** Action verifying the validity of an existing ticket-granting ticket. */
    @Bean
    @ConditionalOnMissingBean(name = "ticketGrantingTicketCheckAction")
    public Action ticketGrantingTicketCheckAction() {
        return new TicketGrantingTicketCheckAction(this.centralAuthenticationService);
    }
    /** Action that terminates the SSO session and removes its cookies. */
    @Lazy
    @Bean
    public Action terminateSessionAction() {
        return new TerminateSessionAction(centralAuthenticationService, ticketGrantingTicketCookieGenerator,
                warnCookieGenerator, casProperties.getLogout());
    }
    /** Action handling the "warn before redirect" confirmation step. */
    @Bean
    @ConditionalOnMissingBean(name = "serviceWarningAction")
    public Action serviceWarningAction() {
        return new ServiceWarningAction(centralAuthenticationService, authenticationSystemSupport, ticketRegistrySupport, warnCookieGenerator);
    }
}
| |
/*******************************************************************************
* Copyright (c) 2013, 2015 EclipseSource.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.eclipsesource.json;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
// Hand-written recursive-descent JSON parser reading from a buffered Reader.
// Raw text for strings/numbers is accumulated via a "capture" window over the
// buffer so that unescaped runs are copied without per-character appends.
class JsonParser {
  private static final int MIN_BUFFER_SIZE = 10;
  private static final int DEFAULT_BUFFER_SIZE = 1024;
  private final Reader reader;
  private final char[] buffer;
  // Number of chars already consumed from the reader before this buffer.
  private int bufferOffset;
  // Next read position in buffer; fill is the count of valid chars in it.
  private int index;
  private int fill;
  // 1-based line number and absolute offset of the current line start,
  // maintained only for error reporting.
  private int line;
  private int lineOffset;
  // Char most recently read, or -1 at end of input.
  private int current;
  // Lazily allocated; holds captured text spanning buffer refills/escapes.
  private StringBuilder captureBuffer;
  // Buffer index where the active capture began, or -1 when not capturing.
  private int captureStart;
  /*
   * |                      bufferOffset
   *                        v
   * [a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t]        < input
   *                       [l|m|n|o|p|q|r|s|t|?|?]    < buffer
   *                          ^               ^
   *                       |  index           fill
   */
  JsonParser( String string ) {
    // Size the buffer to the input (bounded) so short strings stay cheap.
    this( new StringReader( string ),
          Math.max( MIN_BUFFER_SIZE, Math.min( DEFAULT_BUFFER_SIZE, string.length() ) ) );
  }
  JsonParser( Reader reader ) {
    this( reader, DEFAULT_BUFFER_SIZE );
  }
  JsonParser( Reader reader, int buffersize ) {
    this.reader = reader;
    buffer = new char[ buffersize ];
    line = 1;
    captureStart = -1;
  }
  // Parse a single JSON value and require that only whitespace follows it.
  JsonValue parse() throws IOException {
    read();
    skipWhiteSpace();
    JsonValue result = readValue();
    skipWhiteSpace();
    if( !isEndOfText() ) {
      throw error( "Unexpected character" );
    }
    return result;
  }
  // Dispatch on the first character of the value.
  private JsonValue readValue() throws IOException {
    switch( current ) {
      case 'n':
        return readNull();
      case 't':
        return readTrue();
      case 'f':
        return readFalse();
      case '"':
        return readString();
      case '[':
        return readArray();
      case '{':
        return readObject();
      case '-':
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
        return readNumber();
      default:
        throw expected( "value" );
    }
  }
  // current is '[' on entry; consumes through the matching ']'.
  private JsonArray readArray() throws IOException {
    read();
    JsonArray array = new JsonArray();
    skipWhiteSpace();
    if( readChar( ']' ) ) {
      return array;
    }
    do {
      skipWhiteSpace();
      array.add( readValue() );
      skipWhiteSpace();
    } while( readChar( ',' ) );
    if( !readChar( ']' ) ) {
      throw expected( "',' or ']'" );
    }
    return array;
  }
  // current is '{' on entry; consumes through the matching '}'.
  private JsonObject readObject() throws IOException {
    read();
    JsonObject object = new JsonObject();
    skipWhiteSpace();
    if( readChar( '}' ) ) {
      return object;
    }
    do {
      skipWhiteSpace();
      String name = readName();
      skipWhiteSpace();
      if( !readChar( ':' ) ) {
        throw expected( "':'" );
      }
      skipWhiteSpace();
      object.add( name, readValue() );
      skipWhiteSpace();
    } while( readChar( ',' ) );
    if( !readChar( '}' ) ) {
      throw expected( "',' or '}'" );
    }
    return object;
  }
  // Member names are ordinary JSON strings.
  private String readName() throws IOException {
    if( current != '"' ) {
      throw expected( "name" );
    }
    return readStringInternal();
  }
  // current is 'n' on entry; requires the remaining "ull".
  private JsonValue readNull() throws IOException {
    read();
    readRequiredChar( 'u' );
    readRequiredChar( 'l' );
    readRequiredChar( 'l' );
    return JsonValue.NULL;
  }
  private JsonValue readTrue() throws IOException {
    read();
    readRequiredChar( 'r' );
    readRequiredChar( 'u' );
    readRequiredChar( 'e' );
    return JsonValue.TRUE;
  }
  private JsonValue readFalse() throws IOException {
    read();
    readRequiredChar( 'a' );
    readRequiredChar( 'l' );
    readRequiredChar( 's' );
    readRequiredChar( 'e' );
    return JsonValue.FALSE;
  }
  private void readRequiredChar( char ch ) throws IOException {
    if( !readChar( ch ) ) {
      throw expected( "'" + ch + "'" );
    }
  }
  private JsonValue readString() throws IOException {
    return new JsonString( readStringInternal() );
  }
  // Reads the body of a string literal; capture is paused around escapes so
  // only the decoded characters end up in the result.
  private String readStringInternal() throws IOException {
    read();
    startCapture();
    while( current != '"' ) {
      if( current == '\\' ) {
        pauseCapture();
        readEscape();
        startCapture();
      } else if( current < 0x20 ) {
        // control characters (and EOF, current == -1) are illegal in strings
        throw expected( "valid string character" );
      } else {
        read();
      }
    }
    String string = endCapture();
    read();
    return string;
  }
  // current is '\\' on entry; appends the decoded character directly.
  private void readEscape() throws IOException {
    read();
    switch( current ) {
      case '"':
      case '/':
      case '\\':
        captureBuffer.append( (char)current );
        break;
      case 'b':
        captureBuffer.append( '\b' );
        break;
      case 'f':
        captureBuffer.append( '\f' );
        break;
      case 'n':
        captureBuffer.append( '\n' );
        break;
      case 'r':
        captureBuffer.append( '\r' );
        break;
      case 't':
        captureBuffer.append( '\t' );
        break;
      case 'u':
        // \ uXXXX: exactly four hex digits forming one UTF-16 code unit
        char[] hexChars = new char[4];
        for( int i = 0; i < 4; i++ ) {
          read();
          if( !isHexDigit() ) {
            throw expected( "hexadecimal digit" );
          }
          hexChars[i] = (char)current;
        }
        captureBuffer.append( (char)Integer.parseInt( new String( hexChars ), 16 ) );
        break;
      default:
        throw expected( "valid escape sequence" );
    }
    read();
  }
  // Captures the full literal text and defers conversion to JsonNumber.
  private JsonValue readNumber() throws IOException {
    startCapture();
    readChar( '-' );
    int firstDigit = current;
    if( !readDigit() ) {
      throw expected( "digit" );
    }
    if( firstDigit != '0' ) {
      // leading zeros are not allowed, so only non-'0' may be followed by digits
      while( readDigit() ) {
      }
    }
    readFraction();
    readExponent();
    return new JsonNumber( endCapture() );
  }
  private boolean readFraction() throws IOException {
    if( !readChar( '.' ) ) {
      return false;
    }
    if( !readDigit() ) {
      throw expected( "digit" );
    }
    while( readDigit() ) {
    }
    return true;
  }
  private boolean readExponent() throws IOException {
    if( !readChar( 'e' ) && !readChar( 'E' ) ) {
      return false;
    }
    if( !readChar( '+' ) ) {
      readChar( '-' );
    }
    if( !readDigit() ) {
      throw expected( "digit" );
    }
    while( readDigit() ) {
    }
    return true;
  }
  // Consumes ch if it is the current character; returns whether it matched.
  private boolean readChar( char ch ) throws IOException {
    if( current != ch ) {
      return false;
    }
    read();
    return true;
  }
  private boolean readDigit() throws IOException {
    if( !isDigit() ) {
      return false;
    }
    read();
    return true;
  }
  private void skipWhiteSpace() throws IOException {
    while( isWhiteSpace() ) {
      read();
    }
  }
  // Advances current by one char, refilling the buffer as needed. A pending
  // capture is flushed to captureBuffer before the buffer is overwritten.
  private void read() throws IOException {
    if( index == fill ) {
      if( captureStart != -1 ) {
        captureBuffer.append( buffer, captureStart, fill - captureStart );
        captureStart = 0;
      }
      bufferOffset += fill;
      fill = reader.read( buffer, 0, buffer.length );
      index = 0;
      if( fill == -1 ) {
        current = -1;
        return;
      }
    }
    if( current == '\n' ) {
      line++;
      lineOffset = bufferOffset + index;
    }
    current = buffer[index++];
  }
  // Marks the char in `current` (already consumed from the buffer) as the
  // start of the capture window.
  private void startCapture() {
    if( captureBuffer == null ) {
      captureBuffer = new StringBuilder();
    }
    captureStart = index - 1;
  }
  private void pauseCapture() {
    int end = current == -1 ? index : index - 1;
    captureBuffer.append( buffer, captureStart, end - captureStart );
    captureStart = -1;
  }
  // Returns the captured text; avoids the StringBuilder when the capture
  // never crossed a refill or escape.
  private String endCapture() {
    int end = current == -1 ? index : index - 1;
    String captured;
    if( captureBuffer.length() > 0 ) {
      captureBuffer.append( buffer, captureStart, end - captureStart );
      captured = captureBuffer.toString();
      captureBuffer.setLength( 0 );
    } else {
      captured = new String( buffer, captureStart, end - captureStart );
    }
    captureStart = -1;
    return captured;
  }
  private ParseException expected( String expected ) {
    if( isEndOfText() ) {
      return error( "Unexpected end of input" );
    }
    return error( "Expected " + expected );
  }
  // Builds a ParseException with absolute offset, line and column information.
  private ParseException error( String message ) {
    int absIndex = bufferOffset + index;
    int column = absIndex - lineOffset;
    int offset = isEndOfText() ? absIndex : absIndex - 1;
    return new ParseException( message, offset, line, column - 1 );
  }
  private boolean isWhiteSpace() {
    return current == ' ' || current == '\t' || current == '\n' || current == '\r';
  }
  private boolean isDigit() {
    return current >= '0' && current <= '9';
  }
  private boolean isHexDigit() {
    return current >= '0' && current <= '9'
        || current >= 'a' && current <= 'f'
        || current >= 'A' && current <= 'F';
  }
  private boolean isEndOfText() {
    return current == -1;
  }
}
| |
/*
* Copyright 2016, 2017 Bert Laverman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nl.rakis.fs.info;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
/**
 * The state of the aircraft controls: rudder, elevator and aileron (with
 * their trim), spoilers, flaps, gear, parking brake and door position.
 */
public class ControlsInfo
        extends FSKeylessData
{
    // NOTE(review): currently unreferenced in this class; kept for possible external use.
    private static final Logger log = Logger.getLogger(ControlsInfo.class.getName());

    /** Type discriminator for controls data. */
    public static final String TYPE_CONTROLS = "Controls";

    private double rdr;   // rudder position
    private double ele;   // elevator position
    private double ail;   // aileron position
    private double rdrtr; // rudder trim position
    private double eletr; // elevator trim position
    private double ailtr; // aileron trim position
    private double spl;   // spoilers position
    private double flp;   // flaps position
    private int grs;      // gear-down state; assumed flag-like -- TODO confirm semantics
    private double brk;   // parking brake position
    private double dr1;   // door position

    public ControlsInfo() {
        super(getType());
    }

    /** Controls data is keyless: the key is always the empty string. */
    @Override
    public String getKey() {
        return "";
    }

    public static String getType() {
        return TYPE_CONTROLS;
    }

    /**
     * Serialize every field into a String-valued map keyed by the JSON
     * field names.
     *
     * @return map representation of this object
     */
    @Override
    public Map<String, String> toMap() {
        Map<String, String> result = new HashMap<>();
        result.put(JsonFields.FIELD_TYPE, getType());
        result.put(JsonFields.FIELD_RUDDER_POS, Double.toString(getRdr()));
        result.put(JsonFields.FIELD_RUDDER_TRIM_POS, Double.toString(getRdrtr()));
        result.put(JsonFields.FIELD_ELEVATOR_POS, Double.toString(getEle()));
        result.put(JsonFields.FIELD_ELEVATOR_TRIM_POS, Double.toString(getEletr()));
        result.put(JsonFields.FIELD_AILERON_POS, Double.toString(getAil()));
        result.put(JsonFields.FIELD_AILERON_TRIM_POS, Double.toString(getAiltr()));
        result.put(JsonFields.FIELD_SPOILERS_POS, Double.toString(getSpl()));
        result.put(JsonFields.FIELD_FLAPS_POS, Double.toString(getFlp()));
        result.put(JsonFields.FIELD_GEARS_DOWN, Integer.toString(getGrs()));
        result.put(JsonFields.FIELD_PARKING_BRAKE_POS, Double.toString(getBrk()));
        result.put(JsonFields.FIELD_DOOR_POS, Double.toString(getDr1()));
        return result;
    }

    /**
     * Serialize this object to a JSON object containing all fields.
     *
     * @return JSON representation of this object
     */
    @Override
    public JsonObject toJsonObject() {
        return Json.createObjectBuilder()
                .add(JsonFields.FIELD_TYPE, getType())
                .add(JsonFields.FIELD_RUDDER_POS, getRdr())
                .add(JsonFields.FIELD_RUDDER_TRIM_POS, getRdrtr())
                .add(JsonFields.FIELD_ELEVATOR_POS, getEle())
                .add(JsonFields.FIELD_ELEVATOR_TRIM_POS, getEletr())
                .add(JsonFields.FIELD_AILERON_POS, getAil())
                .add(JsonFields.FIELD_AILERON_TRIM_POS, getAiltr())
                .add(JsonFields.FIELD_SPOILERS_POS, getSpl())
                .add(JsonFields.FIELD_FLAPS_POS, getFlp())
                .add(JsonFields.FIELD_GEARS_DOWN, getGrs())
                .add(JsonFields.FIELD_PARKING_BRAKE_POS, getBrk())
                .add(JsonFields.FIELD_DOOR_POS, getDr1())
                .build();
    }

    /**
     * Update this object from the given JSON object. Fields whose value is
     * JSON null are left unchanged.
     *
     * @param obj source JSON object; every field name must be present
     */
    @Override
    public void updateFromJsonObject(JsonObject obj) {
        if (!obj.isNull(JsonFields.FIELD_RUDDER_POS)) {
            setRdr(obj.getJsonNumber(JsonFields.FIELD_RUDDER_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_RUDDER_TRIM_POS)) {
            setRdrtr(obj.getJsonNumber(JsonFields.FIELD_RUDDER_TRIM_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_ELEVATOR_POS)) {
            setEle(obj.getJsonNumber(JsonFields.FIELD_ELEVATOR_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_ELEVATOR_TRIM_POS)) {
            setEletr(obj.getJsonNumber(JsonFields.FIELD_ELEVATOR_TRIM_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_AILERON_POS)) {
            setAil(obj.getJsonNumber(JsonFields.FIELD_AILERON_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_AILERON_TRIM_POS)) {
            setAiltr(obj.getJsonNumber(JsonFields.FIELD_AILERON_TRIM_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_SPOILERS_POS)) {
            setSpl(obj.getJsonNumber(JsonFields.FIELD_SPOILERS_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_FLAPS_POS)) {
            setFlp(obj.getJsonNumber(JsonFields.FIELD_FLAPS_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_GEARS_DOWN)) {
            setGrs(obj.getInt(JsonFields.FIELD_GEARS_DOWN));
        }
        if (!obj.isNull(JsonFields.FIELD_PARKING_BRAKE_POS)) {
            setBrk(obj.getJsonNumber(JsonFields.FIELD_PARKING_BRAKE_POS).doubleValue());
        }
        if (!obj.isNull(JsonFields.FIELD_DOOR_POS)) {
            setDr1(obj.getJsonNumber(JsonFields.FIELD_DOOR_POS).doubleValue());
        }
    }

    /**
     * Build a ControlsInfo from the given JSON object.
     *
     * <p>Fix: delegates to {@link #updateFromJsonObject(JsonObject)} so both
     * deserialization paths share the same null-tolerant field handling.
     * Previously every field was read unconditionally, which failed with a
     * {@code ClassCastException} when a field held JSON null.</p>
     *
     * @param obj source JSON object, may be null
     * @return the populated object, or null if {@code obj} was null
     */
    public static ControlsInfo fromJsonObject(JsonObject obj) {
        ControlsInfo result = null;
        if (obj != null) {
            result = new ControlsInfo();
            result.updateFromJsonObject(obj);
        }
        return result;
    }

    /**
     * Build a ControlsInfo from a JSON string.
     *
     * @param json JSON text, may be null
     * @return the populated object, or null if {@code json} was null
     */
    public static ControlsInfo fromString(String json) {
        ControlsInfo result = null;
        if (json != null) {
            try (StringReader sr = new StringReader(json);
                 JsonReader jr = Json.createReader(sr)) {
                result = fromJsonObject(jr.readObject());
            }
        }
        return result;
    }

    // Simple accessors below; field meanings are documented at the declarations.

    public double getRdr() {
        return rdr;
    }
    public void setRdr(double rdr) {
        this.rdr = rdr;
    }
    public double getEle() {
        return ele;
    }
    public void setEle(double ele) {
        this.ele = ele;
    }
    public double getAil() {
        return ail;
    }
    public void setAil(double ail) {
        this.ail = ail;
    }
    public double getRdrtr() {
        return rdrtr;
    }
    public void setRdrtr(double rdrtr) {
        this.rdrtr = rdrtr;
    }
    public double getEletr() {
        return eletr;
    }
    public void setEletr(double eletr) {
        this.eletr = eletr;
    }
    public double getAiltr() {
        return ailtr;
    }
    public void setAiltr(double ailtr) {
        this.ailtr = ailtr;
    }
    public double getSpl() {
        return spl;
    }
    public void setSpl(double spl) {
        this.spl = spl;
    }
    public double getFlp() {
        return flp;
    }
    public void setFlp(double flp) {
        this.flp = flp;
    }
    public int getGrs() {
        return grs;
    }
    public void setGrs(int grs) {
        this.grs = grs;
    }
    public double getBrk() {
        return brk;
    }
    public void setBrk(double brk) {
        this.brk = brk;
    }
    public double getDr1() {
        return dr1;
    }
    public void setDr1(double dr1) {
        this.dr1 = dr1;
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* @author max
*/
package com.intellij.psi;
import com.intellij.lang.Language;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.impl.PsiDocumentManagerBase;
import com.intellij.psi.impl.SharedPsiElementImplUtil;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.psi.impl.source.tree.FileElement;
import com.intellij.psi.templateLanguages.OuterLanguageElement;
import com.intellij.util.ConcurrencyUtil;
import com.intellij.util.ReflectionUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
// View provider that maintains one PSI tree per language for a single
// document (e.g. template files with a base language plus data languages).
public abstract class MultiplePsiFilesPerDocumentFileViewProvider extends AbstractFileViewProvider {
  // Cache of per-language PSI roots; concurrent because getPsiInner may race.
  private final ConcurrentMap<Language, PsiFileImpl> myRoots = ContainerUtil.newConcurrentMap(1, 0.75f, 1);
  // Provider this copy was made from, or null if this is the original.
  private MultiplePsiFilesPerDocumentFileViewProvider myOriginal;
  public MultiplePsiFilesPerDocumentFileViewProvider(@NotNull PsiManager manager, @NotNull VirtualFile virtualFile, boolean eventSystemEnabled) {
    super(manager, virtualFile, eventSystemEnabled, virtualFile.getFileType());
  }
  @Override
  @NotNull
  public abstract Language getBaseLanguage();
  // Returns all language roots, with the base-language file moved to front.
  @Override
  @NotNull
  public List<PsiFile> getAllFiles() {
    final List<PsiFile> roots = new ArrayList<>();
    for (Language language : getLanguages()) {
      PsiFile psi = getPsi(language);
      if (psi != null) roots.add(psi);
    }
    final PsiFile base = getPsi(getBaseLanguage());
    if (!roots.isEmpty() && roots.get(0) != base) {
      roots.remove(base);
      roots.add(0, base);
    }
    return roots;
  }
  // Drops the cached root for the language and invalidates its PSI.
  protected final void removeFile(@NotNull Language language) {
    PsiFileImpl file = myRoots.remove(language);
    if (file != null) {
      file.markInvalidated();
    }
  }
  // Lazily creates (and caches) the PSI file for the requested language.
  @Override
  protected PsiFile getPsiInner(@NotNull final Language target) {
    PsiFileImpl file = myRoots.get(target);
    if (file == null) {
      if (isPhysical()) {
        VirtualFile virtualFile = getVirtualFile();
        if (isIgnored()) return null;
        VirtualFile parent = virtualFile.getParent();
        if (parent != null) {
          // NOTE(review): result ignored; presumably forces directory PSI
          // creation as a side effect -- confirm.
          getManager().findDirectory(parent);
        }
      }
      if (target != getBaseLanguage() && !getLanguages().contains(target)) {
        return null;
      }
      file = (PsiFileImpl)createFile(target);
      if (file == null) return null;
      if (myOriginal != null) {
        final PsiFile originalFile = myOriginal.getPsi(target);
        if (originalFile != null) {
          file.setOriginalFile(originalFile);
        }
      }
      // cacheOrGet resolves races: the first stored value wins.
      file = ConcurrencyUtil.cacheOrGet(myRoots, target, file);
    }
    return file;
  }
  @Override
  public final PsiFile getCachedPsi(@NotNull Language target) {
    return myRoots.get(target);
  }
  @NotNull
  @Override
  public final List<PsiFile> getCachedPsiFiles() {
    return ContainerUtil.mapNotNull(myRoots.keySet(), this::getCachedPsi);
  }
  // Collects the AST roots that have actually been built.
  @NotNull
  @Override
  public final List<FileElement> getKnownTreeRoots() {
    List<FileElement> files = new ArrayList<>(myRoots.size());
    for (PsiFile file : myRoots.values()) {
      final FileElement treeElement = ((PsiFileImpl)file).getTreeElement();
      if (treeElement != null) {
        files.add(treeElement);
      }
    }
    return files;
  }
  // Test-only consistency check: every root's text must match the document.
  @TestOnly
  public void checkAllTreesEqual() {
    Collection<PsiFileImpl> roots = myRoots.values();
    PsiDocumentManager documentManager = PsiDocumentManager.getInstance(getManager().getProject());
    documentManager.commitAllDocuments();
    for (PsiFile root : roots) {
      Document document = documentManager.getDocument(root);
      assert document != null;
      PsiDocumentManagerBase.checkConsistency(root, document);
      assert root.getText().equals(document.getText());
    }
  }
  @NotNull
  @Override
  public final MultiplePsiFilesPerDocumentFileViewProvider createCopy(@NotNull final VirtualFile fileCopy) {
    final MultiplePsiFilesPerDocumentFileViewProvider copy = cloneInner(fileCopy);
    // Copies always point back at the root original, never at another copy.
    copy.myOriginal = myOriginal == null ? this : myOriginal;
    return copy;
  }
  @NotNull
  protected abstract MultiplePsiFilesPerDocumentFileViewProvider cloneInner(@NotNull VirtualFile fileCopy);
  // Finds the element at offset in a root whose language is assignable to
  // lang; non-base roots take precedence over the base root.
  @Override
  @Nullable
  public PsiElement findElementAt(int offset, @NotNull Class<? extends Language> lang) {
    final PsiFile mainRoot = getPsi(getBaseLanguage());
    PsiElement ret = null;
    for (final Language language : getLanguages()) {
      if (!ReflectionUtil.isAssignable(lang, language.getClass())) continue;
      if (lang.equals(Language.class) && !getLanguages().contains(language)) continue;
      final PsiFile psiRoot = getPsi(language);
      final PsiElement psiElement = findElementAt(psiRoot, offset);
      if (psiElement == null || psiElement instanceof OuterLanguageElement) continue;
      if (ret == null || psiRoot != mainRoot) {
        ret = psiElement;
      }
    }
    return ret;
  }
  @Override
  @Nullable
  public PsiElement findElementAt(int offset) {
    return findElementAt(offset, Language.class);
  }
  // Picks the reference with the narrowest text range across all roots.
  @Override
  @Nullable
  public PsiReference findReferenceAt(int offset) {
    TextRange minRange = new TextRange(0, getContents().length());
    PsiReference ret = null;
    for (final Language language : getLanguages()) {
      final PsiElement psiRoot = getPsi(language);
      final PsiReference reference = SharedPsiElementImplUtil.findReferenceAt(psiRoot, offset, language);
      if (reference == null) continue;
      final TextRange textRange = reference.getRangeInElement().shiftRight(reference.getElement().getTextRange().getStartOffset());
      if (minRange.contains(textRange) && !textRange.contains(minRange)) {
        minRange = textRange;
        ret = reference;
      }
    }
    return ret;
  }
  // Evicts cached roots whose language is no longer part of this provider.
  @Override
  public void contentsSynchronized() {
    Set<Language> languages = getLanguages();
    for (Iterator<Map.Entry<Language, PsiFileImpl>> iterator = myRoots.entrySet().iterator(); iterator.hasNext(); ) {
      Map.Entry<Language, PsiFileImpl> entry = iterator.next();
      if (!languages.contains(entry.getKey())) {
        PsiFileImpl file = entry.getValue();
        iterator.remove();
        file.markInvalidated();
      }
    }
    super.contentsSynchronized();
  }
  @Override
  public final void markInvalidated() {
    for (PsiFileImpl file : myRoots.values()) {
      file.markInvalidated();
    }
    super.markInvalidated();
  }
}
| |
package com.wizzardo.tools.collections;
import java.util.*;
import java.util.regex.Pattern;
/**
* @author: moxa
* Date: 12/17/12
*/
/**
 * Groovy-style static helpers over {@link Iterable} and {@link Map} — each/collect/
 * grep/find/group and friends — together with the generic {@code Closure} callback
 * interfaces they accept.
 */
public class CollectionTools {

    /**
     * Invokes {@code closure} once per element of {@code c}, in iteration order.
     */
    public static <T> void each(Iterable<T> c, VoidClosure<? super T> closure) {
        for (T t : c) {
            closure.execute(t);
        }
    }

    /**
     * Invokes {@code closure} once per entry of {@code c} with the entry's key and value.
     */
    public static <K, V> void each(Map<K, V> c, VoidClosure2<? super K, ? super V> closure) {
        for (Map.Entry<K, V> e : c.entrySet()) {
            closure.execute(e.getKey(), e.getValue());
        }
    }

    /**
     * Invokes {@code closure} with the zero-based position and the element,
     * once per element of {@code c}.
     */
    public static <T> void eachWithIndex(Iterable<T> c, VoidClosure2<Integer, ? super T> closure) {
        int i = 0;
        for (T t : c) {
            closure.execute(i, t);
            i++;
        }
    }

    /**
     * Maps every element of {@code c} through {@code closure} and returns the
     * results as a new list, preserving iteration order.
     */
    public static <T, R> List<R> collect(Iterable<T> c, Closure<R, ? super T> closure) {
        List<R> l = new ArrayList<R>();
        for (T t : c) {
            l.add(closure.execute(t));
        }
        return l;
    }

    /**
     * Returns the elements of {@code c} for which {@code closure} returns {@code true}.
     */
    public static <T> List<T> grep(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        List<T> l = new ArrayList<T>();
        for (T t : c) {
            if (closure.execute(t))
                l.add(t);
        }
        return l;
    }

    /**
     * Returns the elements of {@code c} that are also contained in {@code r}.
     */
    public static <T> List<T> grep(Iterable<T> c, Collection<?> r) {
        List<T> l = new ArrayList<T>();
        for (T t : c) {
            if (r.contains(t))
                l.add(t);
        }
        return l;
    }

    /**
     * Returns the elements of {@code c} whose string form fully matches {@code p}.
     * A {@code null} element is matched against the literal string {@code "null"}.
     */
    public static <T> List<T> grep(Iterable<T> c, Pattern p) {
        List<T> l = new ArrayList<T>();
        for (T t : c) {
            if (p.matcher(String.valueOf(t)).matches())
                l.add(t);
        }
        return l;
    }

    /**
     * Returns the elements of {@code c} that are instances of {@code clazz}.
     * Null elements are skipped (previously {@code t.getClass()} threw a
     * {@link NullPointerException} for them).
     */
    public static <T> List<T> grep(Iterable<T> c, Class<?> clazz) {
        List<T> l = new ArrayList<T>();
        for (T t : c) {
            // Class.isInstance is the dynamic 'instanceof': equivalent to
            // clazz.isAssignableFrom(t.getClass()) for non-null t, false for null.
            if (clazz.isInstance(t))
                l.add(t);
        }
        return l;
    }

    /**
     * Alias of {@link #grep(Iterable, Closure)}: the elements matching the predicate.
     */
    public static <T> List<T> findAll(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        List<T> l = new ArrayList<T>();
        for (T t : c) {
            if (closure.execute(t))
                l.add(t);
        }
        return l;
    }

    /**
     * Returns the first element matching the predicate, or {@code null} if none does.
     */
    public static <T> T find(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        for (T t : c) {
            if (closure.execute(t))
                return t;
        }
        return null;
    }

    /**
     * Removes and returns the first element matching the predicate, or returns
     * {@code null} without modifying {@code c} if none matches. The iterable's
     * iterator must support {@link Iterator#remove()}.
     */
    public static <T> T remove(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        Iterator<T> iterator = c.iterator();
        while (iterator.hasNext()) {
            T t = iterator.next();
            if (closure.execute(t)) {
                iterator.remove();
                return t;
            }
        }
        return null;
    }

    /**
     * Removes every element matching the predicate and returns {@code c} for chaining.
     * The iterable's iterator must support {@link Iterator#remove()}.
     */
    public static <T, C extends Iterable<T>> C removeAll(C c, Closure<Boolean, ? super T> closure) {
        Iterator<T> iterator = c.iterator();
        while (iterator.hasNext()) {
            T t = iterator.next();
            if (closure.execute(t))
                iterator.remove();
        }
        return c;
    }

    /**
     * Returns {@code true} if the predicate holds for every element (vacuously
     * {@code true} for an empty iterable); stops at the first failure.
     */
    public static <T> boolean every(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        for (T t : c) {
            if (!closure.execute(t)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns {@code true} if the predicate holds for at least one element;
     * stops at the first match.
     */
    public static <T> boolean any(Iterable<T> c, Closure<Boolean, ? super T> closure) {
        for (T t : c) {
            if (closure.execute(t)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Joins the string forms of the elements with {@code separator} between
     * consecutive elements.
     */
    public static <T> String join(Iterable<T> c, String separator) {
        StringBuilder sb = new StringBuilder();
        boolean first = true;
        for (T t : c) {
            // Track position explicitly: the old sb.length()>0 check silently dropped
            // the separator whenever the leading elements rendered as empty strings.
            if (first) {
                first = false;
            } else {
                sb.append(separator);
            }
            sb.append(t);
        }
        return sb.toString();
    }

    /**
     * Partitions the elements into a map: each element is classified by {@code toKey}
     * and transformed by {@code toValue}; values with the same key are collected into
     * a list in iteration order.
     */
    public static <K, V, T> Map<K, List<V>> group(Iterable<T> c, Closure<K, ? super T> toKey, Closure<V, ? super T> toValue) {
        HashMap<K, List<V>> map = new HashMap<K, List<V>>();
        for (T t : c) {
            K key = toKey.execute(t);
            List<V> list = map.get(key);
            if (list == null) {
                list = new ArrayList<V>();
                map.put(key, list);
            }
            list.add(toValue.execute(t));
        }
        return map;
    }

    /**
     * Invokes {@code closure} with 0, 1, ..., {@code times - 1}; a no-op when
     * {@code times <= 0}.
     */
    public static void times(int times, VoidClosure<? super Integer> closure) {
        for (int i = 0; i < times; i++) {
            closure.execute(i);
        }
    }

    /** Callback taking one argument and returning a result. */
    public interface Closure<R, T> {
        R execute(T it);
    }

    public interface Closure2<R, T1, T2> {
        R execute(T1 it, T2 it2);
    }

    public interface Closure3<R, T1, T2, T3> {
        R execute(T1 it, T2 it2, T3 it3);
    }

    public interface Closure4<R, T1, T2, T3, T4> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4);
    }

    public interface Closure5<R, T1, T2, T3, T4, T5> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5);
    }

    public interface Closure6<R, T1, T2, T3, T4, T5, T6> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6);
    }

    public interface Closure7<R, T1, T2, T3, T4, T5, T6, T7> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7);
    }

    public interface Closure8<R, T1, T2, T3, T4, T5, T6, T7, T8> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8);
    }

    public interface Closure9<R, T1, T2, T3, T4, T5, T6, T7, T8, T9> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8, T9 it9);
    }

    public interface Closure10<R, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> {
        R execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8, T9 it9, T10 it10);
    }

    /** Callback taking one argument and returning nothing. */
    public interface VoidClosure<T> {
        void execute(T t);
    }

    public interface VoidClosure2<T1, T2> {
        void execute(T1 it, T2 it2);
    }

    public interface VoidClosure3<T1, T2, T3> {
        void execute(T1 it, T2 it2, T3 it3);
    }

    public interface VoidClosure4<T1, T2, T3, T4> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4);
    }

    public interface VoidClosure5<T1, T2, T3, T4, T5> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5);
    }

    public interface VoidClosure6<T1, T2, T3, T4, T5, T6> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6);
    }

    public interface VoidClosure7<T1, T2, T3, T4, T5, T6, T7> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7);
    }

    public interface VoidClosure8<T1, T2, T3, T4, T5, T6, T7, T8> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8);
    }

    public interface VoidClosure9<T1, T2, T3, T4, T5, T6, T7, T8, T9> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8, T9 it9);
    }

    public interface VoidClosure10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> {
        void execute(T1 it, T2 it2, T3 it3, T4 it4, T5 it5, T6 it6, T7 it7, T8 it8, T9 it9, T10 it10);
    }
}
| |
/*
* Copyright 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.web;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import com.google.common.net.HttpHeaders;
import com.google.common.net.MediaType;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.ChecksumException;
import com.google.zxing.DecodeHintType;
import com.google.zxing.FormatException;
import com.google.zxing.LuminanceSource;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.NotFoundException;
import com.google.zxing.Reader;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.client.j2se.BufferedImageLuminanceSource;
import com.google.zxing.client.j2se.ImageReader;
import com.google.zxing.common.GlobalHistogramBinarizer;
import com.google.zxing.common.HybridBinarizer;
import com.google.zxing.multi.GenericMultipleBarcodeReader;
import com.google.zxing.multi.MultipleBarcodeReader;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.FileCleanerCleanup;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.FileCleaningTracker;
import java.awt.color.CMMException;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.imageio.ImageIO;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* {@link HttpServlet} which decodes images containing barcodes. Given a URL, it will
* retrieve the image and decode it. It can also process image files uploaded via POST.
*
* @author Sean Owen
*/
public final class DecodeServlet extends HttpServlet {

  private static final Logger log = Logger.getLogger(DecodeServlet.class.getName());

  // No real reason to let people upload more than a 4MB image
  private static final long MAX_IMAGE_SIZE = 4000000L;
  // No real reason to deal with more than maybe 8.3 megapixels
  private static final int MAX_PIXELS = 1 << 23;
  // Scratch buffer for draining leftover HTTP response bytes before close.
  // NOTE(review): shared by all request threads without synchronization; the bytes
  // read into it are discarded, so collisions are tolerated by design
  // (see the comment inside consumeRemainder).
  private static final byte[] REMAINDER_BUFFER = new byte[32768];
  // Hints for the general decode attempts: try hard, consider every known format.
  private static final Map<DecodeHintType,Object> HINTS;
  // Same hints plus PURE_BARCODE, for images that are exactly a barcode and nothing else.
  private static final Map<DecodeHintType,Object> HINTS_PURE;

  static {
    HINTS = new EnumMap<>(DecodeHintType.class);
    HINTS.put(DecodeHintType.TRY_HARDER, Boolean.TRUE);
    HINTS.put(DecodeHintType.POSSIBLE_FORMATS, EnumSet.allOf(BarcodeFormat.class));
    HINTS_PURE = new EnumMap<>(HINTS);
    HINTS_PURE.put(DecodeHintType.PURE_BARCODE, Boolean.TRUE);
  }

  // Factory producing temp-file-backed upload items; initialized in init().
  private DiskFileItemFactory diskFileItemFactory;
  // URI substrings that are refused outright; loaded from the classpath in init().
  private Iterable<String> blockedURLSubstrings;

  /**
   * Routes zxing library logging into the servlet context log, prepares the disk-backed
   * file-upload factory (using the container temp dir), and loads the blocked-URI
   * substring list from the classpath.
   *
   * @throws ServletException if the blocked-URI resource cannot be read
   */
  @Override
  public void init(ServletConfig servletConfig) throws ServletException {
    Logger logger = Logger.getLogger("com.google.zxing");
    ServletContext context = servletConfig.getServletContext();
    logger.addHandler(new ServletContextLogHandler(context));
    File repository = (File) context.getAttribute("javax.servlet.context.tempdir");
    FileCleaningTracker fileCleaningTracker = FileCleanerCleanup.getFileCleaningTracker(context);
    // Items above 64KB (1 << 16) spill from memory to temp files in the repository dir.
    diskFileItemFactory = new DiskFileItemFactory(1 << 16, repository);
    diskFileItemFactory.setFileCleaningTracker(fileCleaningTracker);
    try {
      blockedURLSubstrings =
          Resources.readLines(Resources.getResource("/private/uri-block-substrings.txt"), Charsets.UTF_8);
    } catch (IOException ioe) {
      throw new ServletException(ioe);
    }
    log.info("Blocking URIs containing: " + blockedURLSubstrings);
  }

  /**
   * Handles decode-by-URL requests: validates the "u" parameter, fetches the image
   * over HTTP (or reads a data: URI directly), then decodes it. Every failure path
   * redirects to an explanatory page rather than returning an error status.
   */
  @Override
  protected void doGet(HttpServletRequest request,
                       HttpServletResponse response) throws ServletException, IOException {
    String imageURIString = request.getParameter("u");
    if (imageURIString == null || imageURIString.isEmpty()) {
      log.info("URI was empty");
      response.sendRedirect("badurl.jspx");
      return;
    }
    imageURIString = imageURIString.trim();
    // Abuse control: refuse URIs containing any blocked substring.
    for (CharSequence substring : blockedURLSubstrings) {
      if (imageURIString.contains(substring)) {
        log.info("Disallowed URI " + imageURIString);
        response.sendRedirect("badurl.jspx");
        return;
      }
    }
    URI imageURI;
    try {
      imageURI = new URI(imageURIString);
      // Assume http: if not specified
      if (imageURI.getScheme() == null) {
        imageURI = new URI("http://" + imageURIString);
      }
    } catch (URISyntaxException urise) {
      log.info("URI " + imageURIString + " was not valid: " + urise);
      response.sendRedirect("badurl.jspx");
      return;
    }
    // Shortcut for data URI
    if ("data".equals(imageURI.getScheme())) {
      try {
        BufferedImage image = ImageReader.readDataURIImage(imageURI);
        processImage(image, request, response);
      } catch (IOException ioe) {
        log.info(ioe.toString());
        response.sendRedirect("badurl.jspx");
      }
      return;
    }
    URL imageURL;
    try {
      imageURL = imageURI.toURL();
    } catch (MalformedURLException ignored) {
      log.info("URI was not valid: " + imageURIString);
      response.sendRedirect("badurl.jspx");
      return;
    }
    HttpURLConnection connection;
    try {
      connection = (HttpURLConnection) imageURL.openConnection();
    } catch (IllegalArgumentException ignored) {
      // openConnection can throw this for some malformed-but-parseable URLs.
      log.info("URI could not be opened: " + imageURL);
      response.sendRedirect("badurl.jspx");
      return;
    }
    connection.setAllowUserInteraction(false);
    connection.setReadTimeout(5000);
    connection.setConnectTimeout(5000);
    connection.setRequestProperty(HttpHeaders.USER_AGENT, "zxing.org");
    connection.setRequestProperty(HttpHeaders.CONNECTION, "close");
    try {
      try {
        connection.connect();
      } catch (IOException ioe) {
        // Encompasses lots of stuff, including
        // java.net.SocketException, java.net.UnknownHostException,
        // javax.net.ssl.SSLPeerUnverifiedException,
        // org.apache.http.NoHttpResponseException,
        // org.apache.http.client.ClientProtocolException,
        log.info(ioe.toString());
        response.sendRedirect("badurl.jspx");
        return;
      }
      InputStream is = null;
      try {
        is = connection.getInputStream();
        if (connection.getResponseCode() != HttpServletResponse.SC_OK) {
          log.info("Unsuccessful return code: " + connection.getResponseCode());
          response.sendRedirect("badurl.jspx");
          return;
        }
        // Reject by declared Content-Length before reading the body at all.
        if (connection.getHeaderFieldInt(HttpHeaders.CONTENT_LENGTH, 0) > MAX_IMAGE_SIZE) {
          log.info("Too large");
          response.sendRedirect("badimage.jspx");
          return;
        }
        log.info("Decoding " + imageURL);
        processStream(is, request, response);
      } catch (IOException ioe) {
        log.info(ioe.toString());
        response.sendRedirect("badurl.jspx");
      } finally {
        if (is != null) {
          // Drain unread bytes so the underlying connection can be closed cleanly.
          consumeRemainder(is);
          is.close();
        }
      }
    } finally {
      connection.disconnect();
    }
  }

  /**
   * Reads and discards whatever is still buffered on the stream. Best-effort only:
   * any exception simply ends the drain.
   */
  private static void consumeRemainder(InputStream is) {
    try {
      int available;
      while ((available = is.available()) > 0) {
        is.read(REMAINDER_BUFFER, 0, available); // don't care about value, or collision
      }
    } catch (IOException | IndexOutOfBoundsException ioe) {
      // sun.net.www.http.ChunkedInputStream.read is throwing IndexOutOfBoundsException
      // continue
    }
  }

  /**
   * Handles multipart POST uploads: decodes the first uploaded file in the request,
   * subject to the same size limit as URL fetches.
   */
  @Override
  protected void doPost(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    if (!ServletFileUpload.isMultipartContent(request)) {
      log.info("File upload was not multipart");
      response.sendRedirect("badimage.jspx");
      return;
    }
    ServletFileUpload upload = new ServletFileUpload(diskFileItemFactory);
    upload.setFileSizeMax(MAX_IMAGE_SIZE);
    // Parse the request
    try {
      for (FileItem item : upload.parseRequest(request)) {
        if (!item.isFormField()) {
          if (item.getSize() <= MAX_IMAGE_SIZE) {
            log.info("Decoding uploaded file");
            try (InputStream is = item.getInputStream()) {
              processStream(is, request, response);
            }
          } else {
            log.info("Too large");
            response.sendRedirect("badimage.jspx");
          }
          // Only the first non-form-field item is processed; the rest are ignored.
          break;
        }
      }
    } catch (FileUploadException fue) {
      log.info(fue.toString());
      response.sendRedirect("badimage.jspx");
    }
  }

  /**
   * Decodes the stream into a BufferedImage, enforces the pixel/dimension limits,
   * then hands off to {@link #processImage}.
   */
  private static void processStream(InputStream is,
                                    ServletRequest request,
                                    HttpServletResponse response) throws ServletException, IOException {
    BufferedImage image;
    try {
      image = ImageIO.read(is);
    } catch (IOException | CMMException | IllegalArgumentException ioe) {
      log.info(ioe.toString());
      // Have seen these in some logs
      response.sendRedirect("badimage.jspx");
      return;
    }
    // ImageIO.read returns null (no exception) when no registered reader matched.
    if (image == null) {
      response.sendRedirect("badimage.jspx");
      return;
    }
    if (image.getHeight() <= 1 || image.getWidth() <= 1 ||
        image.getHeight() * image.getWidth() > MAX_PIXELS) {
      log.info("Dimensions out of bounds: " + image.getWidth() + 'x' + image.getHeight());
      response.sendRedirect("badimage.jspx");
      return;
    }
    processImage(image, request, response);
  }

  /**
   * Runs the zxing decode pipeline over the image, from most to least constrained:
   * multiple barcodes, pure barcode, normal photo, then one retry with the hybrid
   * binarizer. Renders results as plain text when "full=false", otherwise forwards
   * to the result JSP. Only the last decode failure is reported if all attempts fail.
   */
  private static void processImage(BufferedImage image,
                                   ServletRequest request,
                                   HttpServletResponse response) throws IOException, ServletException {
    LuminanceSource source = new BufferedImageLuminanceSource(image);
    BinaryBitmap bitmap = new BinaryBitmap(new GlobalHistogramBinarizer(source));
    Collection<Result> results = Lists.newArrayListWithCapacity(1);
    try {
      Reader reader = new MultiFormatReader();
      ReaderException savedException = null;
      try {
        // Look for multiple barcodes
        MultipleBarcodeReader multiReader = new GenericMultipleBarcodeReader(reader);
        Result[] theResults = multiReader.decodeMultiple(bitmap, HINTS);
        if (theResults != null) {
          results.addAll(Arrays.asList(theResults));
        }
      } catch (ReaderException re) {
        savedException = re;
      }
      if (results.isEmpty()) {
        try {
          // Look for pure barcode
          Result theResult = reader.decode(bitmap, HINTS_PURE);
          if (theResult != null) {
            results.add(theResult);
          }
        } catch (ReaderException re) {
          savedException = re;
        }
      }
      if (results.isEmpty()) {
        try {
          // Look for normal barcode in photo
          Result theResult = reader.decode(bitmap, HINTS);
          if (theResult != null) {
            results.add(theResult);
          }
        } catch (ReaderException re) {
          savedException = re;
        }
      }
      if (results.isEmpty()) {
        try {
          // Try again with other binarizer
          BinaryBitmap hybridBitmap = new BinaryBitmap(new HybridBinarizer(source));
          Result theResult = reader.decode(hybridBitmap, HINTS);
          if (theResult != null) {
            results.add(theResult);
          }
        } catch (ReaderException re) {
          savedException = re;
        }
      }
      if (results.isEmpty()) {
        handleException(savedException, response);
        return;
      }
    } catch (RuntimeException re) {
      // Call out unexpected errors in the log clearly
      log.log(Level.WARNING, "Unexpected exception from library", re);
      throw new ServletException(re);
    }
    String fullParameter = request.getParameter("full");
    boolean minimalOutput = fullParameter != null && !Boolean.parseBoolean(fullParameter);
    if (minimalOutput) {
      // "full=false": emit just the decoded text, one result per line, as UTF-8.
      response.setContentType(MediaType.PLAIN_TEXT_UTF_8.toString());
      response.setCharacterEncoding(Charsets.UTF_8.name());
      try (Writer out = new OutputStreamWriter(response.getOutputStream(), Charsets.UTF_8)) {
        for (Result result : results) {
          out.write(result.getText());
          out.write('\n');
        }
      }
    } else {
      request.setAttribute("results", results);
      request.getRequestDispatcher("decoderesult.jspx").forward(request, response);
    }
  }

  /**
   * Maps a decode failure onto the matching explanatory redirect page.
   */
  private static void handleException(ReaderException re, HttpServletResponse response) throws IOException {
    if (re instanceof NotFoundException) {
      log.info("Not found: " + re);
      response.sendRedirect("notfound.jspx");
    } else if (re instanceof FormatException) {
      log.info("Format problem: " + re);
      response.sendRedirect("format.jspx");
    } else if (re instanceof ChecksumException) {
      log.info("Checksum problem: " + re);
      response.sendRedirect("format.jspx");
    } else {
      log.info("Unknown problem: " + re);
      response.sendRedirect("notfound.jspx");
    }
  }
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.client.hotrod;
import org.infinispan.Cache;
import org.infinispan.client.hotrod.test.HotRodClientTestingUtil;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.marshall.Marshaller;
import org.infinispan.marshall.jboss.JBossMarshaller;
import org.infinispan.server.core.CacheValue;
import org.infinispan.server.hotrod.HotRodServer;
import org.infinispan.test.SingleCacheManagerTest;
import org.infinispan.test.fwk.TestCacheManagerFactory;
import org.infinispan.util.ByteArrayKey;
import org.infinispan.util.concurrent.NotifyingFuture;
import org.infinispan.util.logging.Log;
import org.infinispan.util.logging.LogFactory;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import static org.infinispan.test.TestingUtil.k;
import static org.infinispan.test.TestingUtil.v;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
/**
* @author mmarkus
* @since 4.1
*/
@Test (testName = "client.hotrod.HotRodIntegrationTest", groups = "functional" )
public class HotRodIntegrationTest extends SingleCacheManagerTest {

  private static final Log log = LogFactory.getLog(HotRodIntegrationTest.class);

  private static final String CACHE_NAME = "replSync";
  // Embedded caches hosted by the cache manager; used to verify server-side state.
  private Cache cache;
  private Cache defaultCache;
  // Hot Rod client views onto the same two caches.
  RemoteCache defaultRemote;
  RemoteCache remoteCache;
  private RemoteCacheManager remoteCacheManager;
  protected HotRodServer hotrodServer;

  /**
   * Builds a local cache manager with a named cache, starts a Hot Rod server in
   * front of it, and connects a remote client to both the default and named caches.
   */
  @Override
  protected EmbeddedCacheManager createCacheManager() throws Exception {
    ConfigurationBuilder builder = getDefaultStandaloneCacheConfig(false);
    cacheManager = TestCacheManagerFactory.createLocalCacheManager(false);
    cacheManager.defineConfiguration(CACHE_NAME, builder.build());
    defaultCache = cacheManager.getCache();
    cache = cacheManager.getCache(CACHE_NAME);
    //pass the config file to the cache
    hotrodServer = TestHelper.startHotRodServer(cacheManager);
    log.info("Started server on port: " + hotrodServer.getPort());
    remoteCacheManager = getRemoteCacheManager();
    defaultRemote = remoteCacheManager.getCache();
    remoteCache = remoteCacheManager.getCache(CACHE_NAME);
    return cacheManager;
  }

  /** Creates a client pointed at the just-started server's port. Overridable for variants. */
  protected RemoteCacheManager getRemoteCacheManager() {
    Properties config = new Properties();
    config.put("infinispan.client.hotrod.server_list", "127.0.0.1:" + hotrodServer.getPort());
    return new RemoteCacheManager(config);
  }

  /** Tears down the client and the Hot Rod server after all tests, even on failure. */
  @AfterClass(alwaysRun = true)
  public void testDestroyRemoteCacheFactory() {
    HotRodClientTestingUtil.killRemoteCacheManager(remoteCacheManager);
    HotRodClientTestingUtil.killServers(hotrodServer);
  }

  // NOTE(review): the "assert null == put/remove/replace(...)" checks below appear to
  // assume the client does not return previous values for write operations (so they
  // yield null even when a previous value existed) — confirm against the client's
  // force-return-values configuration.
  public void testPut() throws Exception {
    assert null == remoteCache.put("aKey", "aValue");
    assertCacheContains(cache, "aKey", "aValue");
    assert null == defaultRemote.put("otherKey", "otherValue");
    assertCacheContains(defaultCache, "otherKey", "otherValue");
    assert remoteCache.containsKey("aKey");
    assert defaultRemote.containsKey("otherKey");
    assert remoteCache.get("aKey").equals("aValue");
    assert defaultRemote.get("otherKey").equals("otherValue");
  }

  public void testRemove() throws Exception {
    assert null == remoteCache.put("aKey", "aValue");
    assertCacheContains(cache, "aKey", "aValue");
    assert remoteCache.get("aKey").equals("aValue");
    assert null == remoteCache.remove("aKey");
    // Removal must be visible both server-side and through the remote view.
    assertCacheContains(cache, "aKey", null);
    assert !remoteCache.containsKey("aKey");
  }

  public void testContains() {
    assert !remoteCache.containsKey("aKey");
    remoteCache.put("aKey", "aValue");
    assert remoteCache.containsKey("aKey");
  }

  // Verifies that every write — even with an unchanged value — produces a new version.
  public void testGetVersionedCacheEntry() {
    VersionedValue value = remoteCache.getVersioned("aKey");
    assertNull("expected null but received: " + value, remoteCache.getVersioned("aKey"));
    remoteCache.put("aKey", "aValue");
    assert remoteCache.get("aKey").equals("aValue");
    VersionedValue valueBinary = remoteCache.getVersioned("aKey");
    assert valueBinary != null;
    assertEquals(valueBinary.getValue(), "aValue");
    log.info("Version is: " + valueBinary.getVersion());
    //now put the same value
    remoteCache.put("aKey", "aValue");
    VersionedValue entry2 = remoteCache.getVersioned("aKey");
    assertEquals(entry2.getValue(), "aValue");
    assert entry2.getVersion() != valueBinary.getVersion();
    assert !valueBinary.equals(entry2);
    //now put a different value
    remoteCache.put("aKey", "anotherValue");
    VersionedValue entry3 = remoteCache.getVersioned("aKey");
    assertEquals(entry3.getValue(), "anotherValue");
    assert entry3.getVersion() != entry2.getVersion();
    assert !entry3.equals(entry2);
  }

  // Checks metadata for immortal entries (-1 lifespan/maxIdle) and mortal ones.
  public void testGetWithMetadata() {
    MetadataValue<?> value = remoteCache.getWithMetadata("aKey");
    assertNull("expected null but received: " + value, value);
    remoteCache.put("aKey", "aValue");
    assert remoteCache.get("aKey").equals("aValue");
    MetadataValue<?> immortalValue = remoteCache.getWithMetadata("aKey");
    assertNotNull(immortalValue);
    assertEquals("aValue", immortalValue.getValue());
    assertEquals(-1, immortalValue.getLifespan());
    assertEquals(-1, immortalValue.getMaxIdle());
    remoteCache.put("bKey", "bValue", 60, TimeUnit.SECONDS);
    MetadataValue<?> mortalValueWithLifespan = remoteCache.getWithMetadata("bKey");
    assertNotNull(mortalValueWithLifespan);
    assertEquals("bValue", mortalValueWithLifespan.getValue());
    assertEquals(60, mortalValueWithLifespan.getLifespan());
    assertEquals(-1, mortalValueWithLifespan.getMaxIdle());
    remoteCache.put("cKey", "cValue", 60, TimeUnit.SECONDS, 30, TimeUnit.SECONDS);
    MetadataValue<?> mortalValueWithMaxIdle = remoteCache.getWithMetadata("cKey");
    assertNotNull(mortalValueWithMaxIdle);
    assertEquals("cValue", mortalValueWithMaxIdle.getValue());
    assertEquals(60, mortalValueWithMaxIdle.getLifespan());
    assertEquals(30, mortalValueWithMaxIdle.getMaxIdle());
  }

  public void testReplace() {
    // replace on a missing key is a no-op and returns null.
    assert null == remoteCache.replace("aKey", "anotherValue");
    remoteCache.put("aKey", "aValue");
    // NOTE(review): null here even though the key exists — presumably because previous
    // values are not returned by default; the subsequent get confirms the replace took.
    assert null == remoteCache.replace("aKey", "anotherValue");
    assert remoteCache.get("aKey").equals("anotherValue");
  }

  // Optimistic replace: succeeds only with the current version; stale versions fail.
  public void testReplaceIfUnmodified() {
    assert null == remoteCache.replace("aKey", "aValue");
    remoteCache.put("aKey", "aValue");
    VersionedValue valueBinary = remoteCache.getVersioned("aKey");
    assert remoteCache.replaceWithVersion("aKey", "aNewValue", valueBinary.getVersion());
    VersionedValue entry2 = remoteCache.getVersioned("aKey");
    assert entry2.getVersion() != valueBinary.getVersion();
    assertEquals(entry2.getValue(), "aNewValue");
    // The old version is now stale, so the same call must fail.
    assert !remoteCache.replaceWithVersion("aKey", "aNewValue", valueBinary.getVersion());
  }

  // Versioned replace with a lifespan: the value must survive until expiry, then vanish.
  public void testReplaceIfUnmodifiedWithExpiry(Method m) throws InterruptedException {
    final int key = 1;
    remoteCache.put(key, v(m));
    VersionedValue valueBinary = remoteCache.getVersioned(key);
    int lifespanSecs = 3; // seconds
    long lifespan = TimeUnit.SECONDS.toMillis(lifespanSecs);
    long startTime = System.currentTimeMillis();
    String newValue = v(m, 2);
    assert remoteCache.replaceWithVersion(key, newValue, valueBinary.getVersion(), lifespanSecs);
    // Until the lifespan elapses, the new value must be readable.
    while (true) {
      Object value = remoteCache.get(key);
      if (System.currentTimeMillis() >= startTime + lifespan)
        break;
      assertEquals(v(m, 2), value);
      Thread.sleep(100);
    }
    // Allow up to 2 extra seconds for expiration to be observed.
    while (System.currentTimeMillis() < startTime + lifespan + 2000) {
      if (remoteCache.get(key) == null) break;
      Thread.sleep(50);
    }
    assertNull(remoteCache.get(key));
  }

  // Async variant of the versioned replace-with-lifespan scenario.
  public void testReplaceWithVersionWithLifespanAsync(Method m) throws Exception {
    int lifespanInSecs = 1; //seconds
    final String k = k(m), v = v(m), newV = v(m, 2);
    assertNull(remoteCache.replace(k, v));
    remoteCache.put(k, v);
    VersionedValue valueBinary = remoteCache.getVersioned(k);
    long lifespan = TimeUnit.SECONDS.toMillis(lifespanInSecs);
    long startTime = System.currentTimeMillis();
    NotifyingFuture<Boolean> future = remoteCache.replaceWithVersionAsync(
        k, newV, valueBinary.getVersion(), lifespanInSecs);
    assert future.get();
    while (true) {
      VersionedValue entry2 = remoteCache.getVersioned(k);
      if (System.currentTimeMillis() >= startTime + lifespan)
        break;
      // version should have changed; value should have changed
      assert entry2.getVersion() != valueBinary.getVersion();
      assertEquals(newV, entry2.getValue());
      Thread.sleep(100);
    }
    // Allow up to 2 extra seconds for expiration to be observed.
    while (System.currentTimeMillis() < startTime + lifespan + 2000) {
      if (remoteCache.get(k) == null) break;
      Thread.sleep(50);
    }
    assertNull(remoteCache.getVersioned(k));
  }

  // Optimistic remove: succeeds only with the current version; stale versions fail.
  public void testRemoveIfUnmodified() {
    // NOTE(review): lowercase 'l' long-literal suffix is easy to misread as '1'; prefer 'L'.
    assert !remoteCache.removeWithVersion("aKey", 12321212l);
    remoteCache.put("aKey", "aValue");
    VersionedValue valueBinary = remoteCache.getVersioned("aKey");
    assert remoteCache.removeWithVersion("aKey", valueBinary.getVersion());
    assert !cache.containsKey("aKey");
    remoteCache.put("aKey", "aNewValue");
    VersionedValue entry2 = remoteCache.getVersioned("aKey");
    assert entry2.getVersion() != valueBinary.getVersion();
    assertEquals(entry2.getValue(), "aNewValue");
    assert !remoteCache.removeWithVersion("aKey", valueBinary.getVersion());
  }

  public void testPutIfAbsent() {
    remoteCache.put("aKey", "aValue");
    assert null == remoteCache.putIfAbsent("aKey", "anotherValue");
    assertEquals(remoteCache.get("aKey"),"aValue");
    // NOTE(review): duplicate of the previous assertion.
    assertEquals(remoteCache.get("aKey"),"aValue");
    assert remoteCache.containsKey("aKey");
    // NOTE(review): 'assert true : expr' can never fail, and the message expression is
    // evaluated only on failure — so replace() is never actually invoked here. Likely
    // intended something like: assert remoteCache.replace("aKey", "anotherValue") != null;
    assert true : remoteCache.replace("aKey", "anotherValue");
  }

  public void testClear() {
    remoteCache.put("aKey", "aValue");
    remoteCache.put("aKey2", "aValue");
    remoteCache.clear();
    assert !remoteCache.containsKey("aKey");
    assert !remoteCache.containsKey("aKey2");
    // clear() must also empty the underlying embedded cache.
    assert cache.isEmpty();
  }

  /**
   * Asserts the embedded cache holds {@code value} under {@code key}, comparing the
   * marshalled byte forms the Hot Rod server stores; a null {@code value} asserts
   * the key is absent.
   */
  private void assertCacheContains(Cache cache, String key, String value) throws Exception {
    Marshaller marshaller = new JBossMarshaller();
    byte[] keyBytes = marshaller.objectToByteBuffer(key, 64);
    byte[] valueBytes = marshaller.objectToByteBuffer(value, 64);
    ByteArrayKey cacheKey = new ByteArrayKey(keyBytes);
    CacheValue cacheValue = (CacheValue) cache.get(cacheKey);
    if (value == null) {
      assert cacheValue == null : "Expected null value but received: " + cacheValue;
    } else {
      assert Arrays.equals(valueBytes, (byte[])cacheValue.data());
    }
  }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.devtools.build.lib.packages.Aspect.INJECTING_RULE_KIND_PARAMETER_KEY;
import static java.nio.charset.StandardCharsets.ISO_8859_1;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.collect.Collections2;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionEnvironment;
import com.google.devtools.build.lib.actions.ActionOwner;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactOwner;
import com.google.devtools.build.lib.actions.CommandLineExpansionException;
import com.google.devtools.build.lib.actions.EmptyRunfilesSupplier;
import com.google.devtools.build.lib.actions.ParameterFile;
import com.google.devtools.build.lib.actions.ResourceSet;
import com.google.devtools.build.lib.actions.Root;
import com.google.devtools.build.lib.actions.extra.ExtraActionInfo;
import com.google.devtools.build.lib.actions.extra.JavaCompileInfo;
import com.google.devtools.build.lib.analysis.AnalysisEnvironment;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.CommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomMultiArgv;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.VectorArg;
import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode;
import com.google.devtools.build.lib.skyframe.AspectValue;
import com.google.devtools.build.lib.util.LazyString;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import com.google.devtools.build.lib.vfs.PathFragment;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/** Action that represents a Java compilation. */
@ThreadCompatible
@Immutable
public final class JavaCompileAction extends SpawnAction {
  private static final String JACOCO_INSTRUMENTATION_PROCESSOR = "jacoco";
  // Estimated local resource usage of one JavaBuilder invocation.
  private static final ResourceSet LOCAL_RESOURCES =
      ResourceSet.createWithRamCpuIo(750 /*MB*/, 0.5 /*CPU*/, 0.0 /*IO*/);
  /** Environment variable that sets the UTF-8 charset. */
  static final ImmutableMap<String, String> UTF8_ENVIRONMENT =
      ImmutableMap.of("LC_CTYPE", "en_US.UTF-8");
  // TODO(#3320): This is missing the configuration's action environment!
  static final ActionEnvironment UTF8_ACTION_ENVIRONMENT =
      ActionEnvironment.create(UTF8_ENVIRONMENT);
  /**
   * The command line for the java library builder. It is written to the parameter file, but other
   * consumers (for example, ide_build_info) need access to it as well.
   */
  private final CommandLine javaCompileCommandLine;
  /** The command line actually executed (JavaBuilder launcher plus @params-file reference). */
  private final CommandLine commandLine;
  /**
   * The directory in which generated classfiles are placed.
   * May be erased/created by the JavaBuilder.
   */
  private final PathFragment classDirectory;
  /** The jar file the compilation outputs are written to. */
  private final Artifact outputJar;
  /**
   * The list of classpath entries to specify to javac.
   */
  private final NestedSet<Artifact> classpathEntries;
  /** The list of bootclasspath entries to specify to javac. */
  private final ImmutableList<Artifact> bootclasspathEntries;
  /** The list of sourcepath entries to specify to javac. */
  private final ImmutableList<Artifact> sourcePathEntries;
  /**
   * The path to the extdir to specify to javac.
   */
  private final ImmutableList<Artifact> extdirInputs;
  /** The list of classpath entries to search for annotation processors. */
  private final NestedSet<Artifact> processorPath;
  /**
   * The list of annotation processor classes to run.
   */
  private final ImmutableList<String> processorNames;
  /** Set of additional Java source files to compile. */
  private final ImmutableList<Artifact> sourceJars;
  /** The set of explicit Java source files to compile. */
  private final ImmutableSet<Artifact> sourceFiles;
  /**
   * The compiler options to pass to javac.
   */
  private final ImmutableList<String> javacOpts;
  /** The subset of classpath jars provided by direct dependencies. */
  private final NestedSet<Artifact> directJars;
  /**
   * The level of strict dependency checks (off, warnings, or errors).
   */
  private final BuildConfiguration.StrictDepsMode strictJavaDeps;
  /** The set of .jdeps artifacts provided by direct dependencies. */
  private final NestedSet<Artifact> compileTimeDependencyArtifacts;
  /**
   * Constructs an action to compile a set of Java source files to class files.
   *
   * @param owner the action owner, typically a java_* RuleConfiguredTarget.
   * @param tools the tools used by the action
   * @param inputs the inputs of the action
   * @param outputs the outputs of the action
   * @param javaCompileCommandLine the command line for the java library builder - it's actually
   *     written to the parameter file, but other parts (for example, ide_build_info) need access to
   *     the data
   * @param commandLine the actual invocation command line
   * @param classDirectory the directory in which generated classfiles are placed
   * @param outputJar the jar file the compilation outputs will be written to
   * @param classpathEntries the compile-time classpath entries
   * @param bootclasspathEntries the compile-time bootclasspath entries
   * @param sourcePathEntries the compile-time sourcepath entries
   * @param extdirInputs the compile-time extclasspath entries
   * @param processorPath the classpath to search for annotation processors
   * @param processorNames the annotation processors to run
   * @param sourceJars jars of sources to compile
   * @param sourceFiles source files to compile
   * @param javacOpts the javac options for the compilation
   * @param directJars the subset of classpath jars provided by direct dependencies
   * @param executionInfo the execution info
   * @param strictJavaDeps the Strict Java Deps mode
   * @param compileTimeDependencyArtifacts the jdeps files for direct dependencies
   * @param progressMessage the progress message
   */
  private JavaCompileAction(
      ActionOwner owner,
      NestedSet<Artifact> tools,
      NestedSet<Artifact> inputs,
      Collection<Artifact> outputs,
      CommandLine javaCompileCommandLine,
      CommandLine commandLine,
      PathFragment classDirectory,
      Artifact outputJar,
      NestedSet<Artifact> classpathEntries,
      ImmutableList<Artifact> bootclasspathEntries,
      ImmutableList<Artifact> sourcePathEntries,
      ImmutableList<Artifact> extdirInputs,
      NestedSet<Artifact> processorPath,
      List<String> processorNames,
      Collection<Artifact> sourceJars,
      ImmutableSet<Artifact> sourceFiles,
      List<String> javacOpts,
      NestedSet<Artifact> directJars,
      Map<String, String> executionInfo,
      StrictDepsMode strictJavaDeps,
      NestedSet<Artifact> compileTimeDependencyArtifacts,
      CharSequence progressMessage) {
    super(
        owner,
        tools,
        inputs,
        outputs,
        LOCAL_RESOURCES,
        commandLine,
        false,
        // TODO(#3320): This is missing the configuration's action environment!
        UTF8_ACTION_ENVIRONMENT,
        ImmutableMap.copyOf(executionInfo),
        progressMessage,
        EmptyRunfilesSupplier.INSTANCE,
        "Javac",
        false /*executeUnconditionally*/,
        null /*extraActionInfoSupplier*/);
    this.javaCompileCommandLine = javaCompileCommandLine;
    this.commandLine = commandLine;
    this.classDirectory = checkNotNull(classDirectory);
    this.outputJar = outputJar;
    this.classpathEntries = classpathEntries;
    this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries);
    this.sourcePathEntries = ImmutableList.copyOf(sourcePathEntries);
    this.extdirInputs = extdirInputs;
    this.processorPath = processorPath;
    this.processorNames = ImmutableList.copyOf(processorNames);
    this.sourceJars = ImmutableList.copyOf(sourceJars);
    this.sourceFiles = sourceFiles;
    this.javacOpts = ImmutableList.copyOf(javacOpts);
    this.directJars = checkNotNull(directJars, "directJars must not be null");
    this.strictJavaDeps = strictJavaDeps;
    this.compileTimeDependencyArtifacts = compileTimeDependencyArtifacts;
  }
  /** Returns the given (passed to constructor) source files. */
  @VisibleForTesting
  ImmutableSet<Artifact> getSourceFiles() {
    return sourceFiles;
  }
  /**
   * Returns the list of paths that represents the classpath.
   */
  @VisibleForTesting
  public Iterable<Artifact> getClasspath() {
    return classpathEntries;
  }
  /** Returns the list of paths that represents the bootclasspath. */
  @VisibleForTesting
  Collection<Artifact> getBootclasspath() {
    return bootclasspathEntries;
  }
  /** Returns the list of paths that represents the sourcepath. */
  @VisibleForTesting
  public Collection<Artifact> getSourcePathEntries() {
    return sourcePathEntries;
  }
  /**
   * Returns the path to the extdir.
   */
  @VisibleForTesting
  public Collection<Artifact> getExtdir() {
    return extdirInputs;
  }
  /**
   * Returns the list of paths that represents the source jars.
   */
  @VisibleForTesting
  public Collection<Artifact> getSourceJars() {
    return sourceJars;
  }
  /** Returns the list of paths that represents the processor path. */
  @VisibleForTesting
  public NestedSet<Artifact> getProcessorpath() {
    return processorPath;
  }
  /** Returns the javac options used for this compilation. */
  @VisibleForTesting
  public List<String> getJavacOpts() {
    return javacOpts;
  }
  /** Returns the subset of classpath jars provided by direct dependencies. */
  @VisibleForTesting
  public NestedSet<Artifact> getDirectJars() {
    return directJars;
  }
  /** Returns the .jdeps artifacts provided by direct dependencies. */
  @VisibleForTesting
  public NestedSet<Artifact> getCompileTimeDependencyArtifacts() {
    return compileTimeDependencyArtifacts;
  }
  /** Returns the strict Java deps mode (off, warnings, or errors). */
  @VisibleForTesting
  public BuildConfiguration.StrictDepsMode getStrictJavaDepsMode() {
    return strictJavaDeps;
  }
  /** Returns the directory in which generated classfiles are placed. */
  public PathFragment getClassDirectory() {
    return classDirectory;
  }
  /**
   * Returns the list of class names of processors that should
   * be run.
   */
  @VisibleForTesting
  public List<String> getProcessorNames() {
    return processorNames;
  }
  /**
   * Returns the output jar artifact that gets generated by archiving the results of the Java
   * compilation.
   */
  public Artifact getOutputJar() {
    return outputJar;
  }
  @Override
  public Artifact getPrimaryOutput() {
    return getOutputJar();
  }
  /**
   * Constructs a command line that can be used to invoke the JavaBuilder.
   *
   * <p>Do not use this method, except for testing (and for the in-process strategy).
   *
   * <p>This expands {@code javaCompileCommandLine} (the params-file contents), not the launcher
   * command line that is actually spawned.
   */
  @VisibleForTesting
  public Iterable<String> buildCommandLine() {
    try {
      return javaCompileCommandLine.arguments();
    } catch (CommandLineExpansionException e) {
      // The command line is built from in-memory data only, so expansion is not expected to fail.
      throw new AssertionError("JavaCompileAction command line expansion cannot fail");
    }
  }
/** Returns the command and arguments for a java compile action. */
public List<String> getCommand() {
try {
return ImmutableList.copyOf(commandLine.arguments());
} catch (CommandLineExpansionException e) {
throw new AssertionError("JavaCompileAction command line expansion cannot fail");
}
}
@Override
public String toString() {
try {
StringBuilder result = new StringBuilder();
result.append("JavaBuilder ");
Joiner.on(' ').appendTo(result, commandLine.arguments());
return result.toString();
} catch (CommandLineExpansionException e) {
return "Error expanding command line";
}
}
  /** Populates a {@link JavaCompileInfo} extension describing this compilation. */
  @Override
  public ExtraActionInfo.Builder getExtraActionInfo() {
    JavaCompileInfo.Builder info = JavaCompileInfo.newBuilder();
    info.addAllSourceFile(Artifact.toExecPaths(getSourceFiles()));
    info.addAllClasspath(Artifact.toExecPaths(getClasspath()));
    info.addAllBootclasspath(Artifact.toExecPaths(getBootclasspath()));
    info.addAllSourcepath(Artifact.toExecPaths(getSourceJars()));
    info.addAllJavacOpt(getJavacOpts());
    info.addAllProcessor(getProcessorNames());
    info.addAllProcessorpath(Artifact.toExecPaths(getProcessorpath()));
    info.setOutputjar(getOutputJar().getExecPathString());
    try {
      return super.getExtraActionInfo().setExtension(JavaCompileInfo.javaCompileInfo, info.build());
    } catch (CommandLineExpansionException e) {
      // Expansion happens in the superclass; as elsewhere in this action, it is treated as
      // infallible.
      throw new AssertionError("JavaCompileAction command line expansion cannot fail");
    }
  }
  /**
   * Builds the list of mappings between jars on the classpath and their originating targets names.
   *
   * <p>For each classpath jar this emits three arguments: a {@code --direct_dependency} or
   * {@code --indirect_dependency} flag (depending on membership in {@code directJars}), the jar's
   * exec path, and the label of the target that owns the jar.
   */
  @VisibleForTesting
  static class JarsToTargetsArgv extends CustomMultiArgv {
    private final Iterable<Artifact> classpath;
    private final NestedSet<Artifact> directJars;
    @VisibleForTesting
    JarsToTargetsArgv(Iterable<Artifact> classpath, NestedSet<Artifact> directJars) {
      this.classpath = classpath;
      this.directJars = directJars;
    }
    @Override
    public Iterable<String> argv() {
      Set<Artifact> directJarSet = directJars.toSet();
      ImmutableList.Builder<String> builder = ImmutableList.builder();
      for (Artifact jar : classpath) {
        builder.add(directJarSet.contains(jar) ? "--direct_dependency" : "--indirect_dependency");
        builder.add(jar.getExecPathString());
        builder.add(getArtifactOwnerGeneralizedLabel(jar));
      }
      return builder.build();
    }
    // Renders the owning target's label; labels from external repositories get an extra '@'
    // prefix, and aspect-generated artifacts are suffixed with the injecting rule kind.
    private String getArtifactOwnerGeneralizedLabel(Artifact artifact) {
      ArtifactOwner owner = checkNotNull(artifact.getArtifactOwner(), artifact);
      StringBuilder result = new StringBuilder();
      Label label = owner.getLabel();
      result.append(
          label.getPackageIdentifier().getRepository().isDefault()
                  || label.getPackageIdentifier().getRepository().isMain()
              ? label.toString()
              // Escape '@' prefix for .params file.
              : "@" + label);
      if (owner instanceof AspectValue.AspectKey) {
        AspectValue.AspectKey aspectOwner = (AspectValue.AspectKey) owner;
        ImmutableCollection<String> injectingRuleKind =
            aspectOwner.getParameters().getAttribute(INJECTING_RULE_KIND_PARAMETER_KEY);
        if (injectingRuleKind.size() == 1) {
          result.append(' ').append(getOnlyElement(injectingRuleKind));
        }
      }
      return result.toString();
    }
  }
  /**
   * Creates an ArgvFragment containing the common initial command line arguments: the java
   * executable, JVM flags, and either a {@code -cp ... <main class>} invocation (when
   * instrumentation jars are present) or a plain {@code -jar} launch of JavaBuilder.
   *
   * @param javaExecutable path to the java binary used to launch JavaBuilder
   * @param javaBuilderJar the deploy jar containing JavaBuilder
   * @param instrumentationJars extra jars to prepend to JavaBuilder's classpath (may be empty)
   * @param javaBuilderJvmFlags JVM flags passed before the main class / -jar option
   * @param javaBuilderMainClass main class to run when a classpath invocation is needed
   * @param pathDelimiter host path separator used to join classpath entries
   */
  private static CustomMultiArgv spawnCommandLineBase(
      final PathFragment javaExecutable,
      final Artifact javaBuilderJar,
      final ImmutableList<Artifact> instrumentationJars,
      final ImmutableList<String> javaBuilderJvmFlags,
      final String javaBuilderMainClass,
      final String pathDelimiter) {
    return new CustomMultiArgv() {
      @Override
      public Iterable<String> argv() {
        checkNotNull(javaBuilderJar);
        CustomCommandLine.Builder builder =
            CustomCommandLine.builder().addPath(javaExecutable).addAll(javaBuilderJvmFlags);
        if (!instrumentationJars.isEmpty()) {
          builder
              .addExecPaths(
                  "-cp",
                  VectorArg.join(pathDelimiter)
                      .each(
                          ImmutableList.<Artifact>builder()
                              .addAll(instrumentationJars)
                              .add(javaBuilderJar)
                              .build()))
              .addDynamicString(javaBuilderMainClass);
        } else {
          // If there are no instrumentation jars, use simpler '-jar' option to launch JavaBuilder.
          builder.addExecPath("-jar", javaBuilderJar);
        }
        return builder.build().arguments();
      }
    };
  }
  /**
   * Tells {@link Builder} how to create new artifacts. Is there so that {@link Builder} can be
   * exercised in tests without creating a full {@link RuleContext}.
   */
  public interface ArtifactFactory {
    /**
     * Create an artifact with the specified root-relative path under the specified root.
     *
     * @param rootRelativePath path of the artifact relative to {@code root}
     * @param root the root under which the artifact is created
     * @return the newly created (or existing) derived artifact
     */
    Artifact create(PathFragment rootRelativePath, Root root);
  }
  /** Returns an {@link ArtifactFactory} that delegates to the given analysis environment. */
  @VisibleForTesting
  static ArtifactFactory createArtifactFactory(final AnalysisEnvironment env) {
    return new ArtifactFactory() {
      @Override
      public Artifact create(PathFragment rootRelativePath, Root root) {
        return env.getDerivedArtifact(rootRelativePath, root);
      }
    };
  }
/**
* Builder class to construct Java compile actions.
*/
public static class Builder {
    private final ActionOwner owner;
    private final AnalysisEnvironment analysisEnvironment;
    private final ArtifactFactory artifactFactory;
    private final BuildConfiguration configuration;
    private final JavaSemantics semantics;
    private PathFragment javaExecutable;
    private List<Artifact> javabaseInputs = ImmutableList.of();
    private Artifact outputJar;
    private Artifact gensrcOutputJar;
    private Artifact manifestProtoOutput;
    private Artifact outputDepsProto;
    private Collection<Artifact> additionalOutputs;
    // Derived from the output jar in build() when not set explicitly.
    private Artifact paramFile;
    // When set, enables the jacoco post-processor (coverage instrumentation).
    private Artifact metadata;
    private ImmutableSet<Artifact> sourceFiles = ImmutableSet.of();
    private final Collection<Artifact> sourceJars = new ArrayList<>();
    private BuildConfiguration.StrictDepsMode strictJavaDeps =
        BuildConfiguration.StrictDepsMode.OFF;
    private NestedSet<Artifact> directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private NestedSet<Artifact> compileTimeDependencyArtifacts =
        NestedSetBuilder.emptySet(Order.STABLE_ORDER);
    private List<String> javacOpts = new ArrayList<>();
    private ImmutableList<String> javacJvmOpts = ImmutableList.of();
    private ImmutableMap<String, String> executionInfo = ImmutableMap.of();
    private boolean compressJar;
    private NestedSet<Artifact> classpathEntries =
        NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of();
    private ImmutableList<Artifact> sourcePathEntries = ImmutableList.of();
    private ImmutableList<Artifact> extdirInputs = ImmutableList.of();
    private Artifact javaBuilderJar;
    private Artifact langtoolsJar;
    private NestedSet<Artifact> toolsJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private ImmutableList<Artifact> instrumentationJars = ImmutableList.of();
    private PathFragment sourceGenDirectory;
    private PathFragment tempDirectory;
    private PathFragment classDirectory;
    private NestedSet<Artifact> processorPath = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
    private final List<String> processorNames = new ArrayList<>();
    // Metadata flags forwarded to JavaBuilder (--rule_kind / --target_label / --testonly).
    private String ruleKind;
    private Label targetLabel;
    private boolean testOnly = false;
    /**
     * Creates a Builder from an owner and a build configuration.
     *
     * @param owner the owner of the resulting compile action
     * @param analysisEnvironment used to register the params-file write action
     * @param artifactFactory creates derived artifacts (e.g. the params file)
     * @param configuration the build configuration the action runs under
     * @param semantics Java rule semantics (supplies e.g. the JavaBuilder main class)
     */
    public Builder(ActionOwner owner, AnalysisEnvironment analysisEnvironment,
        ArtifactFactory artifactFactory, BuildConfiguration configuration,
        JavaSemantics semantics) {
      this.owner = owner;
      this.analysisEnvironment = analysisEnvironment;
      this.artifactFactory = artifactFactory;
      this.configuration = configuration;
      this.semantics = semantics;
    }
    /**
     * Creates a Builder from a rule context, deriving the owner, analysis environment, artifact
     * factory, and configuration from it.
     */
    public Builder(final RuleContext ruleContext, JavaSemantics semantics) {
      this(ruleContext.getActionOwner(),
          ruleContext.getAnalysisEnvironment(),
          new ArtifactFactory() {
            @Override
            public Artifact create(PathFragment rootRelativePath, Root root) {
              return ruleContext.getDerivedArtifact(rootRelativePath, root);
            }
          },
          ruleContext.getConfiguration(), semantics);
    }
    /**
     * Builds the {@link JavaCompileAction}, also creating and registering the params-file write
     * action that carries the bulk of the JavaBuilder arguments.
     */
    public JavaCompileAction build() {
      // TODO(bazel-team): all the params should be calculated before getting here, and the various
      // aggregation code below should go away.
      final String pathSeparator = configuration.getHostPathSeparator();
      final List<String> internedJcopts = new ArrayList<>();
      for (String jcopt : javacOpts) {
        internedJcopts.add(StringCanonicalizer.intern(jcopt));
      }
      // Invariant: if strictJavaDeps is OFF, then directJars and
      // dependencyArtifacts are ignored
      if (strictJavaDeps == BuildConfiguration.StrictDepsMode.OFF) {
        directJars = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER);
        compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
      }
      // Invariant: if java_classpath is set to 'off', dependencyArtifacts are ignored
      JavaConfiguration javaConfiguration = configuration.getFragment(JavaConfiguration.class);
      if (javaConfiguration.getReduceJavaClasspath() == JavaClasspathMode.OFF) {
        compileTimeDependencyArtifacts = NestedSetBuilder.emptySet(Order.STABLE_ORDER);
      }
      // Derive a params file next to the output jar when none was provided explicitly.
      if (paramFile == null) {
        paramFile = artifactFactory.create(
            ParameterFile.derivePath(outputJar.getRootRelativePath()),
            configuration.getBinDirectory(targetLabel.getPackageIdentifier().getRepository()));
      }
      Preconditions.checkState(javaExecutable != null, owner);
      // Collect the non-null primary outputs, then any caller-supplied additional outputs.
      ImmutableList.Builder<Artifact> outputsBuilder = ImmutableList.<Artifact>builder()
          .addAll(
              new ArrayList<>(Collections2.filter(Arrays.asList(
                  outputJar,
                  metadata,
                  gensrcOutputJar,
                  manifestProtoOutput,
                  outputDepsProto), Predicates.notNull())));
      if (additionalOutputs != null) {
        outputsBuilder.addAll(additionalOutputs);
      }
      ImmutableList<Artifact> outputs = outputsBuilder.build();
      CustomCommandLine paramFileContents = buildParamFileContents(internedJcopts);
      Action parameterFileWriteAction = new ParameterFileWriteAction(owner, paramFile,
          paramFileContents, ParameterFile.ParameterFileType.UNQUOTED, ISO_8859_1);
      analysisEnvironment.registerAction(parameterFileWriteAction);
      CustomMultiArgv spawnCommandLineBase =
          spawnCommandLineBase(
              javaExecutable,
              javaBuilderJar,
              instrumentationJars,
              javacJvmOpts,
              semantics.getJavaBuilderMainClass(),
              pathSeparator);
      // The actual params-file-based command line executed for a compile action.
      CommandLine javaBuilderCommandLine =
          CustomCommandLine.builder()
              .addCustomMultiArgv(spawnCommandLineBase)
              .addFormatted("@%s", paramFile.getExecPath())
              .build();
      NestedSet<Artifact> tools =
          NestedSetBuilder.<Artifact>stableOrder()
              .add(langtoolsJar)
              .addTransitive(toolsJars)
              .add(javaBuilderJar)
              .addAll(instrumentationJars)
              .build();
      // Note: the params file itself is an input of the compile action.
      NestedSet<Artifact> inputs =
          NestedSetBuilder.<Artifact>stableOrder()
              .addTransitive(classpathEntries)
              .addTransitive(compileTimeDependencyArtifacts)
              .addTransitive(processorPath)
              .addAll(sourceJars)
              .addAll(sourceFiles)
              .addAll(javabaseInputs)
              .addAll(bootclasspathEntries)
              .addAll(sourcePathEntries)
              .addAll(extdirInputs)
              .add(paramFile)
              .addTransitive(tools)
              .build();
      return new JavaCompileAction(
          owner,
          tools,
          inputs,
          outputs,
          paramFileContents,
          javaBuilderCommandLine,
          classDirectory,
          outputJar,
          classpathEntries,
          bootclasspathEntries,
          sourcePathEntries,
          extdirInputs,
          processorPath,
          processorNames,
          sourceJars,
          sourceFiles,
          internedJcopts,
          directJars,
          executionInfo,
          strictJavaDeps,
          compileTimeDependencyArtifacts,
          getProgressMessage());
    }
    /**
     * Assembles the JavaBuilder flags that are written to the params file. Optional flags are
     * emitted only when their corresponding builder field is set/non-empty.
     */
    private CustomCommandLine buildParamFileContents(Collection<String> javacOpts) {
      checkNotNull(classDirectory, "classDirectory should not be null");
      checkNotNull(tempDirectory, "tempDirectory should not be null");
      CustomCommandLine.Builder result = CustomCommandLine.builder();
      result.add("--classdir").addPath(classDirectory);
      result.add("--tempdir").addPath(tempDirectory);
      if (outputJar != null) {
        result.addExecPath("--output", outputJar);
      }
      if (sourceGenDirectory != null) {
        result.add("--sourcegendir").addPath(sourceGenDirectory);
      }
      if (gensrcOutputJar != null) {
        result.addExecPath("--generated_sources_output", gensrcOutputJar);
      }
      if (manifestProtoOutput != null) {
        result.addExecPath("--output_manifest_proto", manifestProtoOutput);
      }
      if (compressJar) {
        result.add("--compress_jar");
      }
      if (outputDepsProto != null) {
        result.addExecPath("--output_deps_proto", outputDepsProto);
      }
      if (!extdirInputs.isEmpty()) {
        result.addExecPaths("--extclasspath", extdirInputs);
      }
      if (!bootclasspathEntries.isEmpty()) {
        result.addExecPaths("--bootclasspath", bootclasspathEntries);
      }
      if (!sourcePathEntries.isEmpty()) {
        result.addExecPaths("--sourcepath", sourcePathEntries);
      }
      if (!processorPath.isEmpty()) {
        result.addExecPaths("--processorpath", processorPath);
      }
      if (!processorNames.isEmpty()) {
        result.addAll("--processors", ImmutableList.copyOf(processorNames));
      }
      if (!sourceJars.isEmpty()) {
        result.addExecPaths("--source_jars", ImmutableList.copyOf(sourceJars));
      }
      if (!sourceFiles.isEmpty()) {
        result.addExecPaths("--sources", sourceFiles);
      }
      if (!javacOpts.isEmpty()) {
        result.addAll("--javacopts", ImmutableList.copyOf(javacOpts));
      }
      if (ruleKind != null) {
        result.add("--rule_kind", ruleKind);
      }
      if (targetLabel != null) {
        result.add("--target_label");
        if (targetLabel.getPackageIdentifier().getRepository().isDefault()
            || targetLabel.getPackageIdentifier().getRepository().isMain()) {
          result.addLabel(targetLabel);
        } else {
          // @-prefixed strings will be assumed to be filenames and expanded by
          // {@link JavaLibraryBuildRequest}, so add an extra &at; to escape it.
          result.addPrefixedLabel("@", targetLabel);
        }
      }
      if (testOnly) {
        result.add("--testonly");
      }
      if (!classpathEntries.isEmpty()) {
        result.addExecPaths("--classpath", classpathEntries);
      }
      // strict_java_deps controls whether the mapping from jars to targets is
      // written out and whether we try to minimize the compile-time classpath.
      if (strictJavaDeps != BuildConfiguration.StrictDepsMode.OFF) {
        result.add("--strict_java_deps", strictJavaDeps.toString());
        result.addCustomMultiArgv(new JarsToTargetsArgv(classpathEntries, directJars));
        if (configuration.getFragment(JavaConfiguration.class).getReduceJavaClasspath()
            == JavaClasspathMode.JAVABUILDER) {
          result.add("--reduce_classpath");
          if (!compileTimeDependencyArtifacts.isEmpty()) {
            result.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts);
          }
        }
      }
      // Coverage: run the jacoco post-processor, excluding test classes by name pattern.
      if (metadata != null) {
        result.add("--post_processor");
        result.addExecPath(JACOCO_INSTRUMENTATION_PROCESSOR, metadata);
        result.addPath(
            configuration
                .getCoverageMetadataDirectory(targetLabel.getPackageIdentifier().getRepository())
                .getExecPath());
        result.add("-*Test");
        result.add("-*TestCase");
      }
      return result.build();
    }
    /**
     * Returns a lazily-rendered progress message like
     * "Building foo.jar (2 source files, 1 source jar) and running annotation processors (Foo)".
     */
    private LazyString getProgressMessage() {
      // Copy fields into locals so the anonymous LazyString captures plain values rather than
      // this Builder instance.
      Artifact outputJar = this.outputJar;
      int sourceFileCount = sourceFiles.size();
      int sourceJarCount = sourceJars.size();
      String annotationProcessorNames = getProcessorNames();
      return new LazyString() {
        @Override
        public String toString() {
          StringBuilder sb = new StringBuilder("Building ");
          sb.append(outputJar.prettyPrint());
          sb.append(" (");
          boolean first = true;
          first = appendCount(sb, first, sourceFileCount, "source file");
          first = appendCount(sb, first, sourceJarCount, "source jar");
          sb.append(")");
          sb.append(annotationProcessorNames);
          return sb.toString();
        }
      };
    }
private String getProcessorNames() {
if (processorNames.isEmpty()) {
return "";
}
StringBuilder sb = new StringBuilder();
List<String> shortNames = new ArrayList<>();
for (String name : processorNames) {
// Annotation processor names are qualified class names. Omit the package part for the
// progress message, e.g. `com.google.Foo` -> `Foo`.
int idx = name.lastIndexOf('.');
String shortName = idx != -1 ? name.substring(idx + 1) : name;
shortNames.add(shortName);
}
sb.append(" and running annotation processors (");
Joiner.on(", ").appendTo(sb, shortNames);
sb.append(")");
return sb.toString();
}
/**
* Append an input count to the progress message, e.g. "2 source jars". If an input count has
* already been appended, prefix with ", ".
*/
private static boolean appendCount(StringBuilder sb, boolean first, int count, String name) {
if (count > 0) {
if (!first) {
sb.append(", ");
} else {
first = false;
}
sb.append(count).append(' ').append(name);
if (count > 1) {
sb.append('s');
}
}
return first;
}
    /** Sets the params file; when unset, one is derived from the output jar in {@link #build}. */
    public Builder setParameterFile(Artifact paramFile) {
      this.paramFile = paramFile;
      return this;
    }
    /** Sets the path of the java executable used to launch JavaBuilder. */
    public Builder setJavaExecutable(PathFragment javaExecutable) {
      this.javaExecutable = javaExecutable;
      return this;
    }
    /** Sets the javabase inputs (copied defensively). */
    public Builder setJavaBaseInputs(Iterable<Artifact> javabaseInputs) {
      this.javabaseInputs = ImmutableList.copyOf(javabaseInputs);
      return this;
    }
    /** Sets the jar file the compilation outputs are written to. */
    public Builder setOutputJar(Artifact outputJar) {
      this.outputJar = outputJar;
      return this;
    }
    /** Sets the jar that collects sources generated by annotation processors. */
    public Builder setGensrcOutputJar(Artifact gensrcOutputJar) {
      this.gensrcOutputJar = gensrcOutputJar;
      return this;
    }
    /** Sets the output manifest proto artifact. */
    public Builder setManifestProtoOutput(Artifact manifestProtoOutput) {
      this.manifestProtoOutput = manifestProtoOutput;
      return this;
    }
    /** Sets the .jdeps output proto artifact. */
    public Builder setOutputDepsProto(Artifact outputDepsProto) {
      this.outputDepsProto = outputDepsProto;
      return this;
    }
    /** Sets extra outputs appended to the primary outputs in {@link #build}. */
    public Builder setAdditionalOutputs(Collection<Artifact> outputs) {
      this.additionalOutputs = outputs;
      return this;
    }
    /** Sets the coverage metadata artifact; enables the jacoco post-processor when non-null. */
    public Builder setMetadata(Artifact metadata) {
      this.metadata = metadata;
      return this;
    }
    /** Sets the explicit Java source files to compile. */
    public Builder setSourceFiles(ImmutableSet<Artifact> sourceFiles) {
      this.sourceFiles = sourceFiles;
      return this;
    }
    /** Adds jars of sources to compile (accumulates across calls). */
    public Builder addSourceJars(Collection<Artifact> sourceJars) {
      this.sourceJars.addAll(sourceJars);
      return this;
    }
    /**
     * Sets the strictness of Java dependency checking, see {@link
     * com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode}.
     */
    public Builder setStrictJavaDeps(BuildConfiguration.StrictDepsMode strictDeps) {
      strictJavaDeps = strictDeps;
      return this;
    }
    /** Sets (replaces) the jar artifacts provided by direct dependencies. */
    public Builder setDirectJars(NestedSet<Artifact> directJars) {
      this.directJars = checkNotNull(directJars, "directJars must not be null");
      return this;
    }
public Builder setCompileTimeDependencyArtifacts(NestedSet<Artifact> dependencyArtifacts) {
checkNotNull(compileTimeDependencyArtifacts, "dependencyArtifacts must not be null");
this.compileTimeDependencyArtifacts = dependencyArtifacts;
return this;
}
    /** Sets the javac options (copied defensively). */
    public Builder setJavacOpts(Iterable<String> copts) {
      this.javacOpts = ImmutableList.copyOf(copts);
      return this;
    }
    /** Sets the JVM flags for the JavaBuilder invocation. */
    public Builder setJavacJvmOpts(ImmutableList<String> opts) {
      this.javacJvmOpts = opts;
      return this;
    }
    /** Sets the execution info passed to the spawn. */
    public Builder setJavacExecutionInfo(ImmutableMap<String, String> executionInfo) {
      this.executionInfo = executionInfo;
      return this;
    }
    /** Sets whether the output jar is compressed (emits --compress_jar). */
    public Builder setCompressJar(boolean compressJar) {
      this.compressJar = compressJar;
      return this;
    }
    /** Sets the compile-time classpath entries. */
    public Builder setClasspathEntries(NestedSet<Artifact> classpathEntries) {
      this.classpathEntries = classpathEntries;
      return this;
    }
    /** Sets the compile-time bootclasspath entries (copied defensively). */
    public Builder setBootclasspathEntries(Iterable<Artifact> bootclasspathEntries) {
      this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries);
      return this;
    }
    /** Sets the compile-time sourcepath entries (copied defensively). */
    public Builder setSourcePathEntries(Iterable<Artifact> sourcePathEntries) {
      this.sourcePathEntries = ImmutableList.copyOf(sourcePathEntries);
      return this;
    }
    /** Sets the compile-time extclasspath entries (copied defensively). */
    public Builder setExtdirInputs(Iterable<Artifact> extdirEntries) {
      this.extdirInputs = ImmutableList.copyOf(extdirEntries);
      return this;
    }
    /**
     * Sets the directory where source files generated by annotation processors should be stored.
     */
    public Builder setSourceGenDirectory(PathFragment sourceGenDirectory) {
      this.sourceGenDirectory = sourceGenDirectory;
      return this;
    }
    /** Sets the temp directory used by JavaBuilder (required; see buildParamFileContents). */
    public Builder setTempDirectory(PathFragment tempDirectory) {
      this.tempDirectory = tempDirectory;
      return this;
    }
    /** Sets the class output directory (required; see buildParamFileContents). */
    public Builder setClassDirectory(PathFragment classDirectory) {
      this.classDirectory = classDirectory;
      return this;
    }
    /** Sets the classpath to search for annotation processors. */
    public Builder setProcessorPaths(NestedSet<Artifact> processorPaths) {
      this.processorPath = processorPaths;
      return this;
    }
    /** Adds annotation processor class names (accumulates across calls). */
    public Builder addProcessorNames(Collection<String> processorNames) {
      this.processorNames.addAll(processorNames);
      return this;
    }
    /** Sets the langtools jar, added to the action's tools. */
    public Builder setLangtoolsJar(Artifact langtoolsJar) {
      this.langtoolsJar = langtoolsJar;
      return this;
    }
    /** Sets the tools jars. */
    public Builder setToolsJars(NestedSet<Artifact> toolsJars) {
      checkNotNull(toolsJars, "toolsJars must not be null");
      this.toolsJars = toolsJars;
      return this;
    }
    /** Sets the JavaBuilder deploy jar. */
    public Builder setJavaBuilderJar(Artifact javaBuilderJar) {
      this.javaBuilderJar = javaBuilderJar;
      return this;
    }
    /** Sets jars prepended to JavaBuilder's own classpath (copied defensively). */
    public Builder setInstrumentationJars(Iterable<Artifact> instrumentationJars) {
      this.instrumentationJars = ImmutableList.copyOf(instrumentationJars);
      return this;
    }
    /** Sets the rule kind forwarded to JavaBuilder as --rule_kind. */
    public Builder setRuleKind(String ruleKind) {
      this.ruleKind = ruleKind;
      return this;
    }
    /** Sets the target label forwarded to JavaBuilder as --target_label. */
    public Builder setTargetLabel(Label targetLabel) {
      this.targetLabel = targetLabel;
      return this;
    }
    /** Sets whether the --testonly flag is emitted. */
    public Builder setTestOnly(boolean testOnly) {
      this.testOnly = testOnly;
      return this;
    }
}
}
| |
/**
* $RCSfile: $
* $Revision: $
* $Date: $
*
* Copyright (C) 2005-2008 Jive Software. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.openfire.pep;
import java.util.Collection;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.dom4j.DocumentHelper;
import org.dom4j.Element;
import org.dom4j.QName;
import org.jivesoftware.openfire.PacketRouter;
import org.jivesoftware.openfire.SessionManager;
import org.jivesoftware.openfire.XMPPServer;
import org.jivesoftware.openfire.commands.AdHocCommandManager;
import org.jivesoftware.openfire.entitycaps.EntityCapabilities;
import org.jivesoftware.openfire.entitycaps.EntityCapabilitiesManager;
import org.jivesoftware.openfire.pubsub.CollectionNode;
import org.jivesoftware.openfire.pubsub.DefaultNodeConfiguration;
import org.jivesoftware.openfire.pubsub.LeafNode;
import org.jivesoftware.openfire.pubsub.Node;
import org.jivesoftware.openfire.pubsub.NodeSubscription;
import org.jivesoftware.openfire.pubsub.PendingSubscriptionsCommand;
import org.jivesoftware.openfire.pubsub.PubSubEngine;
import org.jivesoftware.openfire.pubsub.PubSubPersistenceManager;
import org.jivesoftware.openfire.pubsub.PubSubService;
import org.jivesoftware.openfire.pubsub.PublishedItem;
import org.jivesoftware.openfire.pubsub.WakeupProvider;
import org.jivesoftware.openfire.pubsub.models.AccessModel;
import org.jivesoftware.openfire.pubsub.models.PublisherModel;
import org.jivesoftware.openfire.roster.Roster;
import org.jivesoftware.openfire.roster.RosterItem;
import org.jivesoftware.openfire.session.ClientSession;
import org.jivesoftware.openfire.user.UserNotFoundException;
import org.jivesoftware.util.LocaleUtils;
import org.jivesoftware.util.StringUtils;
import org.jivesoftware.util.XMPPDateTimeFormat;
import org.jivesoftware.util.cache.Cacheable;
import org.xmpp.packet.JID;
import org.xmpp.packet.Message;
import org.xmpp.packet.Packet;
import org.xmpp.packet.PacketExtension;
/**
* A PEPService is a {@link PubSubService} for use with XEP-0163: "Personal Eventing via
* Pubsub" Version 1.0
*
* Note: Although this class implements {@link Cacheable}, instances should only be
* cached in caches that have time-based (as opposed to size-based) eviction policies.
*
* @author Armando Jagucki
*/
/**
 * A PEPService is a {@link PubSubService} for use with XEP-0163: "Personal Eventing via
 * Pubsub" Version 1.0.
 *
 * Note: Although this class implements {@link Cacheable}, instances should only be
 * cached in caches that have time-based (as opposed to size-based) eviction policies
 * (see {@link #getCachedSize()}).
 *
 * @author Armando Jagucki
 */
public class PEPService implements PubSubService, Cacheable {
    /**
     * The bare JID that this service is identified by.
     */
    private String serviceOwnerJID;
    /**
     * Collection node that acts as the root node of the entire node hierarchy.
     */
    private CollectionNode rootCollectionNode = null;
    /**
     * Nodes managed by this service, table: key nodeID (String); value Node
     */
    private Map<String, Node> nodes = new ConcurrentHashMap<String, Node>();
    /**
     * The packet router for the server.
     */
    private PacketRouter router = null;
    /**
     * Default configuration to use for newly created leaf nodes.
     */
    private DefaultNodeConfiguration leafDefaultConfiguration;
    /**
     * Default configuration to use for newly created collection nodes.
     */
    private DefaultNodeConfiguration collectionDefaultConfiguration;
    /**
     * The permission policy for creating nodes. A true value means that not
     * anyone can create a node, only the service admin.
     */
    private boolean nodeCreationRestricted = true;
    /**
     * Keep a registry of the presence's show value of users that subscribed to
     * a node of the pep service and for which the node only delivers
     * notifications for online users or node subscriptions deliver events based
     * on the user presence show value. Offline users will not have an entry in
     * the map. Note: Key-> bare JID and Value-> Map whose key is full JID of
     * connected resource and value is show value of the last received presence.
     */
    private Map<String, Map<String, String>> barePresences = new ConcurrentHashMap<String, Map<String, String>>();
    /**
     * Manager that keeps the list of ad-hoc commands and processing command
     * requests.
     */
    private AdHocCommandManager adHocCommandManager;
    /**
     * Used to handle filtered-notifications (XEP-0163 "+notify" capabilities).
     */
    private EntityCapabilitiesManager entityCapsManager = EntityCapabilitiesManager.getInstance();

    /**
     * Constructs a PEPService. Loads (or creates and persists) the default node
     * configurations, loads all persisted nodes, and ensures a root collection
     * node exists for the owning user.
     *
     * @param server the XMPP server.
     * @param bareJID the bare JID (service ID) of the user owning the service.
     */
    public PEPService(XMPPServer server, String bareJID) {
        this.serviceOwnerJID = bareJID;
        router = server.getPacketRouter();
        // Initialize the ad-hoc commands manager to use for this pep service
        adHocCommandManager = new AdHocCommandManager();
        adHocCommandManager.addCommand(new PendingSubscriptionsCommand(this));
        // Load default configuration for leaf nodes
        leafDefaultConfiguration = PubSubPersistenceManager.loadDefaultConfiguration(this, true);
        if (leafDefaultConfiguration == null) {
            // Create and save default configuration for leaf nodes
            leafDefaultConfiguration = new DefaultNodeConfiguration(true);
            leafDefaultConfiguration.setAccessModel(AccessModel.presence);
            leafDefaultConfiguration.setPublisherModel(PublisherModel.publishers);
            leafDefaultConfiguration.setDeliverPayloads(true);
            leafDefaultConfiguration.setLanguage("English");
            leafDefaultConfiguration.setMaxPayloadSize(5120);
            leafDefaultConfiguration.setNotifyConfigChanges(true);
            leafDefaultConfiguration.setNotifyDelete(true);
            leafDefaultConfiguration.setNotifyRetract(true);
            leafDefaultConfiguration.setPersistPublishedItems(false);
            leafDefaultConfiguration.setMaxPublishedItems(-1);
            leafDefaultConfiguration.setPresenceBasedDelivery(false);
            leafDefaultConfiguration.setSendItemSubscribe(true);
            leafDefaultConfiguration.setSubscriptionEnabled(true);
            leafDefaultConfiguration.setReplyPolicy(null);
            PubSubPersistenceManager.createDefaultConfiguration(this, leafDefaultConfiguration);
        }
        // Load default configuration for collection nodes
        collectionDefaultConfiguration = PubSubPersistenceManager.loadDefaultConfiguration(this, false);
        if (collectionDefaultConfiguration == null) {
            // Create and save default configuration for collection nodes
            collectionDefaultConfiguration = new DefaultNodeConfiguration(false);
            collectionDefaultConfiguration.setAccessModel(AccessModel.presence);
            collectionDefaultConfiguration.setPublisherModel(PublisherModel.publishers);
            collectionDefaultConfiguration.setDeliverPayloads(false);
            collectionDefaultConfiguration.setLanguage("English");
            collectionDefaultConfiguration.setNotifyConfigChanges(true);
            collectionDefaultConfiguration.setNotifyDelete(true);
            collectionDefaultConfiguration.setNotifyRetract(true);
            collectionDefaultConfiguration.setPresenceBasedDelivery(false);
            collectionDefaultConfiguration.setSubscriptionEnabled(true);
            collectionDefaultConfiguration.setReplyPolicy(null);
            collectionDefaultConfiguration.setAssociationPolicy(CollectionNode.LeafNodeAssociationPolicy.all);
            collectionDefaultConfiguration.setMaxLeafNodes(-1);
            PubSubPersistenceManager.createDefaultConfiguration(this, collectionDefaultConfiguration);
        }
        // Load nodes to memory
        PubSubPersistenceManager.loadNodes(this);
        // Ensure that we have a root collection node
        if (nodes.isEmpty()) {
            // Create root collection node
            JID creatorJID = new JID(bareJID);
            rootCollectionNode = new CollectionNode(this, null, bareJID, creatorJID);
            // Add the creator as the node owner
            rootCollectionNode.addOwner(creatorJID);
            // Save new root node
            rootCollectionNode.saveToDB();
        }
        else {
            rootCollectionNode = (CollectionNode) getNode(bareJID);
        }
    }

    public void addNode(Node node) {
        nodes.put(node.getNodeID(), node);
    }

    public void removeNode(String nodeID) {
        nodes.remove(nodeID);
    }

    public Node getNode(String nodeID) {
        return nodes.get(nodeID);
    }

    public Collection<Node> getNodes() {
        return nodes.values();
    }

    public CollectionNode getRootCollectionNode() {
        return rootCollectionNode;
    }

    public JID getAddress() {
        return new JID(serviceOwnerJID);
    }

    public String getServiceID() {
        // The bare JID of the user is the service ID for PEP
        return serviceOwnerJID;
    }

    public DefaultNodeConfiguration getDefaultNodeConfiguration(boolean leafType) {
        return leafType ? leafDefaultConfiguration : collectionDefaultConfiguration;
    }

    public Collection<String> getShowPresences(JID subscriber) {
        return PubSubEngine.getShowPresences(this, subscriber);
    }

    public boolean canCreateNode(JID creator) {
        // Node creation is always allowed for the service admin; otherwise it is
        // only allowed when creation is not restricted.
        return !isNodeCreationRestricted() || isServiceAdmin(creator);
    }

    /**
     * Returns true if the prober is allowed to see the presence of the probee.
     *
     * @param prober the user that is trying to probe the presence of another user.
     * @param probee the username of the user that is being probed.
     * @return true if the prober is allowed to see the presence of the probee.
     * @throws UserNotFoundException If the probee does not exist in the local server or the prober
     *         is not present in the roster of the probee.
     */
    private boolean canProbePresence(JID prober, JID probee) throws UserNotFoundException {
        Roster roster = XMPPServer.getInstance().getRosterManager().getRoster(prober.getNode());
        RosterItem item = roster.getRosterItem(probee);
        // A "both" or "from" subscription grants the prober presence visibility.
        return item.getSubStatus() == RosterItem.SUB_BOTH || item.getSubStatus() == RosterItem.SUB_FROM;
    }

    public boolean isCollectionNodesSupported() {
        return true;
    }

    public boolean isInstantNodeSupported() {
        return true;
    }

    public boolean isMultipleSubscriptionsEnabled() {
        return false;
    }

    public boolean isServiceAdmin(JID user) {
        // Here we consider a 'service admin' to be the user that this PEPService
        // is associated with.
        return serviceOwnerJID.equals(user.toBareJID());
    }

    public boolean isNodeCreationRestricted() {
        return nodeCreationRestricted;
    }

    public void presenceSubscriptionNotRequired(Node node, JID user) {
        PubSubEngine.presenceSubscriptionNotRequired(this, node, user);
    }

    public void presenceSubscriptionRequired(Node node, JID user) {
        PubSubEngine.presenceSubscriptionRequired(this, node, user);
    }

    public void send(Packet packet) {
        router.route(packet);
    }

    public void broadcast(Node node, Message message, Collection<JID> jids) {
        message.setFrom(getAddress());
        for (JID jid : jids) {
            message.setTo(jid);
            message.setID(node.getNodeID() + "__" + jid.toBareJID() + "__" + StringUtils.randomString(5));
            router.route(message);
        }
    }

    /**
     * Sends an event notification for a published (or retracted) item to the given
     * recipient. When the recipient subscribed with a bare JID, the notification is
     * delivered to each of the recipient's connected full JIDs; otherwise it is
     * routed directly. When the recipient has a presence subscription to the item
     * publisher, an Extended Stanza Addressing "replyto" extension identifying the
     * publishing resource is added to the notification.
     *
     * @param node the node the notification relates to.
     * @param message the notification stanza to deliver.
     * @param recipientJID the address of the intended recipient.
     */
    public void sendNotification(Node node, Message message, JID recipientJID) {
        message.setTo(recipientJID);
        message.setFrom(getAddress());
        message.setID(node.getNodeID() + "__" + recipientJID.toBareJID() + "__" + StringUtils.randomString(5));
        // If the recipient subscribed with a bare JID and this PEPService can retrieve
        // presence information for the recipient, collect all of their full JIDs and
        // send the notification to each below.
        Set<JID> recipientFullJIDs = new HashSet<JID>();
        if (XMPPServer.getInstance().isLocal(recipientJID)) {
            if (recipientJID.getResource() == null) {
                for (ClientSession clientSession : SessionManager.getInstance().getSessions(recipientJID.getNode())) {
                    recipientFullJIDs.add(clientSession.getAddress());
                }
            }
        }
        else {
            // Since recipientJID is not local, try to get presence info from cached known remote
            // presences.
            // TODO: OF-605 the old code depends on a cache that would contain presence state on all (?!) JIDS on all (?!)
            // remote domains. As we cannot depend on this information to be correct (even if we could ensure that this
            // potentially unlimited amount of data would indeed be manageable in the first place), this code was removed.
            recipientFullJIDs.add(recipientJID);
        }
        if (recipientFullJIDs.isEmpty()) {
            router.route(message);
            return;
        }
        for (JID recipientFullJID : recipientFullJIDs) {
            // Include an Extended Stanza Addressing "replyto" extension specifying the publishing
            // resource. However, only include the extension if the receiver has a presence subscription
            // to the service owner.
            try {
                JID publisher = null;
                // Get the ID of the node that had an item published to or retracted from.
                Element itemsElement = message.getElement().element("event").element("items");
                String nodeID = itemsElement.attributeValue("node");
                // Get the ID of the item that was published or retracted.
                String itemID = null;
                Element itemElement = itemsElement.element("item");
                if (itemElement == null) {
                    Element retractElement = itemsElement.element("retract");
                    if (retractElement != null) {
                        itemID = retractElement.attributeValue("id");
                    }
                }
                else {
                    itemID = itemElement.attributeValue("id");
                }
                // Check if the recipientFullJID is interested in notifications for this node.
                // If the recipient has not yet requested any notification filtering, continue and send
                // the notification.
                EntityCapabilities entityCaps = entityCapsManager.getEntityCapabilities(recipientFullJID);
                if (entityCaps != null) {
                    if (!entityCaps.containsFeature(nodeID + "+notify")) {
                        // This recipient filtered out notifications for this node. Skip only this
                        // recipient instead of aborting delivery to all remaining recipients.
                        continue;
                    }
                }
                // Get the full JID of the item publisher from the node that was published to.
                // This full JID will be used as the "replyto" address in the addressing extension.
                if (node.isCollectionNode()) {
                    for (Node leafNode : node.getNodes()) {
                        if (leafNode.getNodeID().equals(nodeID)) {
                            publisher = leafNode.getPublishedItem(itemID).getPublisher();
                            // Ensure the recipientJID has access to receive notifications for items published to the leaf node.
                            AccessModel accessModel = leafNode.getAccessModel();
                            if (!accessModel.canAccessItems(leafNode, recipientFullJID, publisher)) {
                                publisher = null;
                            }
                            break;
                        }
                    }
                    if (publisher == null) {
                        // Recipient may not access items on this leaf node; skip only this recipient.
                        continue;
                    }
                }
                else {
                    publisher = node.getPublishedItem(itemID).getPublisher();
                }
                // Ensure the recipient is subscribed to the service owner's (publisher's) presence.
                if (canProbePresence(publisher, recipientFullJID)) {
                    Element addresses = DocumentHelper.createElement(QName.get("addresses", "http://jabber.org/protocol/address"));
                    Element address = addresses.addElement("address");
                    address.addAttribute("type", "replyto");
                    address.addAttribute("jid", publisher.toString());
                    Message extendedMessage = message.createCopy();
                    extendedMessage.addExtension(new PacketExtension(addresses));
                    extendedMessage.setTo(recipientFullJID);
                    router.route(extendedMessage);
                }
            }
            catch (IndexOutOfBoundsException e) {
                // Do not add addressing extension to message.
            }
            catch (UserNotFoundException e) {
                // Do not add addressing extension to message.
                router.route(message);
            }
            catch (NullPointerException e) {
                // NOTE(review): NPE is (ab)used here as control flow for messages that lack the
                // expected event/items structure; preserved as-is to avoid behavior changes.
                try {
                    if (canProbePresence(getAddress(), recipientFullJID)) {
                        message.setTo(recipientFullJID);
                    }
                }
                catch (UserNotFoundException e1) {
                    // Do nothing
                }
                router.route(message);
            }
        }
    }

    /**
     * Sends an event notification for the last published item of each leaf node under the
     * root collection node to the recipient JID. If the recipient has no subscription to
     * the root collection node, has not yet been authorized, or is pending to be
     * configured -- then no notifications are going to be sent.<p>
     *
     * Depending on the subscription configuration the event notifications may or may not have
     * a payload, may not be sent if a keyword (i.e. filter) was defined and it was not matched.
     *
     * @param recipientJID the recipient that is to receive the last published item notifications.
     */
    public void sendLastPublishedItems(JID recipientJID) {
        // Ensure the recipient has a subscription to this service's root collection node.
        NodeSubscription subscription = rootCollectionNode.getSubscription(recipientJID);
        if (subscription == null) {
            subscription = rootCollectionNode.getSubscription(new JID(recipientJID.toBareJID()));
        }
        if (subscription == null) {
            return;
        }
        // Send the last published item of each leaf node to the recipient.
        for (Node leafNode : rootCollectionNode.getNodes()) {
            // Retrieve last published item for the leaf node.
            PublishedItem leafLastPublishedItem = leafNode.getLastPublishedItem();
            if (leafLastPublishedItem == null) {
                continue;
            }
            // Check if the published item can be sent to the subscriber
            if (!subscription.canSendPublicationEvent(leafLastPublishedItem.getNode(), leafLastPublishedItem)) {
                return;
            }
            // Send event notification to the subscriber
            Message notification = new Message();
            Element event = notification.getElement().addElement("event", "http://jabber.org/protocol/pubsub#event");
            Element items = event.addElement("items");
            items.addAttribute("node", leafLastPublishedItem.getNodeID());
            Element item = items.addElement("item");
            if (((LeafNode) leafLastPublishedItem.getNode()).isItemRequired()) {
                item.addAttribute("id", leafLastPublishedItem.getID());
            }
            if (leafLastPublishedItem.getNode().isPayloadDelivered() && leafLastPublishedItem.getPayload() != null) {
                item.add(leafLastPublishedItem.getPayload().createCopy());
            }
            // Add a message body (if required)
            if (subscription.isIncludingBody()) {
                notification.setBody(LocaleUtils.getLocalizedString("pubsub.notification.message.body"));
            }
            // Include date when published item was created
            notification.getElement().addElement("delay", "urn:xmpp:delay").addAttribute("stamp", XMPPDateTimeFormat.format(leafLastPublishedItem.getCreationDate()));
            // Send the event notification to the subscriber
            this.sendNotification(subscription.getNode(), notification, subscription.getJID());
        }
    }

    public Map<String, Map<String, String>> getBarePresences() {
        return barePresences;
    }

    public AdHocCommandManager getManager() {
        return adHocCommandManager;
    }

    public int getCachedSize() {
        // Rather arbitrary. Don't use this for size-based eviction policies!
        return 600;
    }

    public void setWakeupProvider(WakeupProvider resolver) {
        // [Magnet] Not supported.
    }

    public WakeupProvider getWakeupProvider() {
        // [Magnet]
        return null;
    }
}
| |
package org.hisp.dhis.schema;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlRootElement;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.primitives.Primitives;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.NameableObject;
import org.hisp.dhis.common.annotation.Description;
import org.hisp.dhis.system.util.ReflectionUtils;
import org.springframework.util.StringUtils;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* Default PropertyIntrospectorService implementation that uses Reflection and Jackson annotations
* for reading in properties.
*
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
/**
 * Default PropertyIntrospectorService implementation that uses Reflection and Jackson annotations
 * for reading in properties. Jackson-annotated getters are discovered via reflection, then merged
 * with persistence metadata obtained from Hibernate.
 *
 * @author Morten Olav Hansen &lt;mortenoh@gmail.com&gt;
 */
public class Jackson2PropertyIntrospectorService
    extends AbstractPropertyIntrospectorService
{
    @Override
    protected Map<String, Property> scanClass( Class<?> clazz )
    {
        Map<String, Property> propertyMap = Maps.newHashMap();
        Map<String, Property> hibernatePropertyMap = getPropertiesFromHibernate( clazz );
        List<String> classFieldNames = ReflectionUtils.getAllFieldNames( clazz );

        // TODO this is quite nasty, should find a better way of exposing properties at class-level
        if ( clazz.isAnnotationPresent( JacksonXmlRootElement.class ) )
        {
            Property property = new Property();

            JacksonXmlRootElement jacksonXmlRootElement = clazz.getAnnotation( JacksonXmlRootElement.class );

            if ( !StringUtils.isEmpty( jacksonXmlRootElement.localName() ) )
            {
                property.setName( jacksonXmlRootElement.localName() );
            }

            if ( !StringUtils.isEmpty( jacksonXmlRootElement.namespace() ) )
            {
                property.setNamespace( jacksonXmlRootElement.namespace() );
            }

            propertyMap.put( "__self__", property );
        }

        List<Property> properties = collectProperties( clazz );

        for ( Property property : properties )
        {
            Method getterMethod = property.getGetterMethod();
            JsonProperty jsonProperty = getterMethod.getAnnotation( JsonProperty.class );

            String fieldName = getFieldName( getterMethod );
            property.setName( !StringUtils.isEmpty( jsonProperty.value() ) ? jsonProperty.value() : fieldName );

            if ( property.getGetterMethod() != null )
            {
                property.setReadable( true );
            }

            if ( property.getSetterMethod() != null )
            {
                property.setWritable( true );
            }

            if ( classFieldNames.contains( fieldName ) )
            {
                property.setFieldName( fieldName );
            }

            if ( hibernatePropertyMap.containsKey( fieldName ) )
            {
                Property hibernateProperty = hibernatePropertyMap.get( fieldName );

                property.setPersisted( true );
                property.setWritable( true );
                property.setUnique( hibernateProperty.isUnique() );
                property.setRequired( hibernateProperty.isRequired() );
                property.setLength( hibernateProperty.getLength() );
                property.setMax( hibernateProperty.getMax() );
                property.setMin( hibernateProperty.getMin() );
                property.setCollection( hibernateProperty.isCollection() );
                property.setCascade( hibernateProperty.getCascade() );
                property.setOwner( hibernateProperty.isOwner() );
                property.setManyToMany( hibernateProperty.isManyToMany() );
                property.setOneToOne( hibernateProperty.isOneToOne() );
                property.setManyToOne( hibernateProperty.isManyToOne() );
                property.setOwningRole( hibernateProperty.getOwningRole() );
                property.setInverseRole( hibernateProperty.getInverseRole() );

                // Note: the getter/setter on the property may be replaced here, so any
                // annotation lookups below must go through property.getGetterMethod().
                property.setGetterMethod( hibernateProperty.getGetterMethod() );
                property.setSetterMethod( hibernateProperty.getSetterMethod() );
            }

            if ( property.getGetterMethod().isAnnotationPresent( Description.class ) )
            {
                Description description = property.getGetterMethod().getAnnotation( Description.class );
                property.setDescription( description.value() );
            }

            if ( property.getGetterMethod().isAnnotationPresent( JacksonXmlProperty.class ) )
            {
                // Read the annotation from the same method the presence check used; the
                // original getter local may be stale after the Hibernate merge above.
                JacksonXmlProperty jacksonXmlProperty = property.getGetterMethod().getAnnotation( JacksonXmlProperty.class );

                if ( !StringUtils.isEmpty( jacksonXmlProperty.localName() ) )
                {
                    property.setName( jacksonXmlProperty.localName() );
                }

                if ( !StringUtils.isEmpty( jacksonXmlProperty.namespace() ) )
                {
                    property.setNamespace( jacksonXmlProperty.namespace() );
                }

                property.setAttribute( jacksonXmlProperty.isAttribute() );
            }

            Class<?> returnType = property.getGetterMethod().getReturnType();
            property.setKlass( Primitives.wrap( returnType ) );

            if ( Collection.class.isAssignableFrom( returnType ) )
            {
                property.setCollection( true );
                property.setCollectionName( property.getName() );

                Type type = property.getGetterMethod().getGenericReturnType();

                if ( ParameterizedType.class.isInstance( type ) )
                {
                    ParameterizedType parameterizedType = (ParameterizedType) type;
                    Class<?> klass = (Class<?>) parameterizedType.getActualTypeArguments()[0];
                    property.setItemKlass( Primitives.wrap( klass ) );

                    // A collection of items with no Jackson-annotated getters is "simple"
                    if ( collectProperties( klass ).isEmpty() )
                    {
                        property.setSimple( true );
                    }

                    if ( IdentifiableObject.class.isAssignableFrom( klass ) )
                    {
                        property.setIdentifiableObject( true );

                        if ( NameableObject.class.isAssignableFrom( klass ) )
                        {
                            property.setNameableObject( true );
                        }
                    }
                }
            }
            else
            {
                if ( collectProperties( returnType ).isEmpty() )
                {
                    property.setSimple( true );
                }
            }

            if ( property.isCollection() )
            {
                if ( property.getGetterMethod().isAnnotationPresent( JacksonXmlElementWrapper.class ) )
                {
                    // Read the annotation from the same method the presence check used (see above).
                    JacksonXmlElementWrapper jacksonXmlElementWrapper = property.getGetterMethod().getAnnotation( JacksonXmlElementWrapper.class );
                    property.setCollectionWrapping( jacksonXmlElementWrapper.useWrapping() );

                    // TODO what if element-wrapper have different namespace?
                    if ( !StringUtils.isEmpty( jacksonXmlElementWrapper.localName() ) )
                    {
                        property.setCollectionName( jacksonXmlElementWrapper.localName() );
                    }
                }

                propertyMap.put( property.getCollectionName(), property );
            }
            else
            {
                propertyMap.put( property.getName(), property );
            }

            if ( Enum.class.isAssignableFrom( property.getKlass() ) )
            {
                Object[] enumConstants = property.getKlass().getEnumConstants();
                List<String> enumValues = new ArrayList<>();

                for ( Object value : enumConstants )
                {
                    enumValues.add( value.toString() );
                }

                property.setConstants( enumValues );
            }

            SchemaUtils.updatePropertyTypes( property );
        }

        return propertyMap;
    }

    /**
     * Derives the bean field name for an accessor by stripping a leading
     * "is", "has" or "get" prefix and uncapitalizing the remainder.
     *
     * @param method the accessor method.
     * @return the derived field name.
     */
    private String getFieldName( Method method )
    {
        String name;

        String[] getters = new String[]{
            "is", "has", "get"
        };

        name = method.getName();

        for ( String getter : getters )
        {
            if ( name.startsWith( getter ) )
            {
                name = name.substring( getter.length() );
            }
        }

        return StringUtils.uncapitalize( name );
    }

    /**
     * Collects all no-argument methods on the given class that carry
     * {@link JsonProperty}, pairing each with its matching setter (if any).
     *
     * @param klass the class to scan.
     * @return the discovered properties.
     */
    private List<Property> collectProperties( Class<?> klass )
    {
        Map<String, Method> methodMap = ReflectionUtils.getMethodMap( klass );
        List<Property> properties = Lists.newArrayList();

        for ( Method method : methodMap.values() )
        {
            if ( method.isAnnotationPresent( JsonProperty.class ) )
            {
                // Only zero-argument methods are getters
                if ( method.getGenericParameterTypes().length == 0 )
                {
                    String fieldName = getFieldName( method );
                    String setterName = "set" + StringUtils.capitalize( fieldName );

                    Property property = new Property( klass, method, null );
                    property.setFieldName( fieldName );
                    property.setSetterMethod( methodMap.get( setterName ) );

                    properties.add( property );
                }
            }
        }

        return properties;
    }
}
| |
/*
* Copyright 2013 Jon S Akhtar (Sylvanaar)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sylvanaar.idea.Lua.lang.psi.impl;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFileFactory;
import com.intellij.psi.PsiReference;
import com.intellij.psi.util.PsiTreeUtil;
import com.sylvanaar.idea.Lua.LuaFileType;
import com.sylvanaar.idea.Lua.debugger.LuaCodeFragment;
import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocComment;
import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocParameterReference;
import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocReferenceElement;
import com.sylvanaar.idea.Lua.lang.luadoc.psi.api.LuaDocTag;
import com.sylvanaar.idea.Lua.lang.psi.*;
import com.sylvanaar.idea.Lua.lang.psi.expressions.LuaDeclarationExpression;
import com.sylvanaar.idea.Lua.lang.psi.expressions.LuaExpression;
import com.sylvanaar.idea.Lua.lang.psi.lists.LuaIdentifierList;
import com.sylvanaar.idea.Lua.lang.psi.statements.*;
import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaCompoundIdentifier;
import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaIdentifier;
import com.sylvanaar.idea.Lua.lang.psi.symbols.LuaSymbol;
import com.sylvanaar.idea.Lua.lang.psi.visitor.LuaElementVisitor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Created by IntelliJ IDEA.
* User: Jon S Akhtar
* Date: Apr 14, 2010
* Time: 7:16:01 PM
*/
public class LuaPsiElementFactoryImpl extends LuaPsiElementFactory {
Project myProject;
public LuaPsiElementFactoryImpl(Project project) {
myProject = project;
}
@Nullable
private static PsiElement getChildOfFirstStatement(@NotNull PsiElement file) {
return file.getFirstChild().getNextSibling();
}
@NotNull
private LuaPsiFile createDummyFile(@NotNull CharSequence s, boolean isPhysical) {
return (LuaPsiFile) PsiFileFactory.getInstance(myProject).createFileFromText(
"DUMMY__." + LuaFileType.LUA_FILE_TYPE.getDefaultExtension(), LuaFileType.LUA_FILE_TYPE, s,
System.currentTimeMillis(), isPhysical);
}
@NotNull
private LuaPsiFile createDummyFile(@NotNull CharSequence s) {
return createDummyFile(s, false);
}
@NotNull
@Override
public LuaPsiFile createLuaFile(@NotNull String text) {
return createLuaFile(text, false, null);
}
@Nullable
@Override
public LuaSymbol createReferenceNameFromText(String newElementName) {
LuaPsiFile file = createDummyFile(newElementName + " = nil");
if (PsiTreeUtil.hasErrorElements(file))
return null;
if (!(getChildOfFirstStatement(file) instanceof LuaAssignmentStatement))
return null;
LuaAssignmentStatement assign = (LuaAssignmentStatement) getChildOfFirstStatement(file);
assert assign != null;
final LuaIdentifierList leftExpressions = assign.getLeftExprs();
if (leftExpressions.count() == 0)
return null;
LuaSymbol e = leftExpressions.getSymbols()[0];
if (e.getText().equals(newElementName))
return e;
return null;
}
@Override
public LuaIdentifier createLocalNameIdentifier(@NotNull String name) {
int firstDot = name.indexOf('.');
String prefix = name.substring(0, firstDot > 0 ? firstDot : name.length());
LuaPsiFile file = createDummyFile("local " + prefix + "; " + name +
" = nil");
final LuaIdentifier[] declaration = new LuaIdentifier[1];
file.acceptChildren(new LuaElementVisitor() {
@Override
public void visitAssignment(LuaAssignmentStatement e) {
if (e instanceof LuaLocalDefinitionStatement)
return;
declaration[0] = (LuaIdentifier) e.getAssignments()[0].getSymbol();
}
});
return declaration[0];
}
@NotNull
@Override
public LuaIdentifier createGlobalNameIdentifier(String name) {
LuaPsiFile file = createDummyFile(name + "=true; nop=" + name);
final LuaAssignmentStatement expressionStatement = (LuaAssignmentStatement) file.getLastChild();
assert expressionStatement != null;
final PsiReference ref = (PsiReference) expressionStatement.getRightExprs().getFirstChild();
return (LuaIdentifier) ref.getElement();
}
@NotNull
@Override
public LuaIdentifier createFieldNameIdentifier(String name) {
LuaPsiFile file = createDummyFile("a." + name + "=nil");
LuaAssignmentStatement assign = (LuaAssignmentStatement) getChildOfFirstStatement(file);
assert assign != null;
LuaReferenceElement element = assign.getLeftExprs().getReferenceExprs()[0];
LuaCompoundIdentifier id = (LuaCompoundIdentifier) element.getElement();
return (LuaIdentifier) id.getRightSymbol();
}
@Nullable
@Override
public LuaExpression createExpressionFromText(String newExpression) {
LuaPsiFile file = createDummyFile("return " + newExpression);
LuaReturnStatement ret = (LuaReturnStatement) getChildOfFirstStatement(file);
if (ret == null)
return null;
return ret.getReturnValue().getLuaExpressions().get(0);
}
@Override
public LuaStatementElement createStatementFromText(@NotNull String newStatement) {
LuaPsiFile file = createDummyFile(newStatement);
return file.getStatements()[0];
}
@NotNull
@Override
public PsiComment createCommentFromText(@NotNull String s, PsiElement parent) {
LuaPsiFile file = createDummyFile(s);
return (PsiComment) file.getChildren()[0];
}
@Override
public PsiElement createWhiteSpaceFromText(@NotNull String text) {
LuaPsiFile file = createDummyFile(text);
return file.getChildren()[0];
}
@Override
public LuaDeclarationExpression createLocalNameIdentifierDecl(String name) {
LuaPsiFile file = createDummyFile("local " + name + " = 1");
final LuaDeclarationExpression[] declaration = new LuaDeclarationExpression[1];
file.acceptChildren(new LuaElementVisitor() {
@Override
public void visitDeclarationStatement(@NotNull LuaDeclarationStatement e) {
declaration[0] = (LuaDeclarationExpression) e.getDefinedSymbols()[0];
}
});
return declaration[0];
}
@Nullable
@Override
public LuaDeclarationExpression createGlobalNameIdentifierDecl(String name) {
LuaPsiFile file = createDummyFile(name + "=true");
final LuaAssignmentStatement expressionStatement = (LuaAssignmentStatement) getChildOfFirstStatement(file);
if (expressionStatement != null) {
return (LuaDeclarationExpression) expressionStatement.getLeftExprs().getFirstChild().getFirstChild();
}
return null;
}
@Nullable
@Override
public LuaDeclarationExpression createParameterNameIdentifier(String name) {
LuaPsiFile file = createDummyFile("function a(" + name + ") end");
final LuaFunctionDefinition functionDef = (LuaFunctionDefinition) getChildOfFirstStatement(file);
if (functionDef != null)
return functionDef.getParameters().getLuaParameters()[0];
return null;
}
@NotNull
@Override
public LuaExpressionCodeFragment createExpressionCodeFragment(String text, PsiElement context, boolean b) {
return new LuaCodeFragment(myProject, new LuaExpressionFragmentElementType(), b,
"dummy.lua", text, context);
}
@NotNull
@Override
public LuaDocComment createDocCommentFromText(@NotNull String s) {
LuaPsiFile file = createDummyFile(s);
PsiElement e = getChildOfFirstStatement(file);
assert e instanceof LuaDocComment : "Error creating comment from " + s;
return (LuaDocComment) e;
}
@Nullable
@Override
public LuaDocReferenceElement createDocFieldReferenceNameFromText(String elementName) {
LuaPsiFile file = createDummyFile("--- @field " + elementName + "\nlocal a={" + elementName + "=true}");
LuaDocComment comment = (LuaDocComment) getChildOfFirstStatement(file);
if (comment == null)
return null;
LuaDocTag tag = comment.getTags()[0];
return tag.getDocFieldReference();
}
@Nullable
@Override
public LuaDocParameterReference createParameterDocMemberReferenceNameFromText(String elementName) {
LuaPsiFile file = createDummyFile("--- @param " + elementName + "\nfunction _" + elementName + " (" + elementName + ") end");
LuaDocComment comment = (LuaDocComment) getChildOfFirstStatement(file);
if (comment == null)
return null;
LuaDocTag tag = comment.getTags()[0];
return tag.getDocParameterReference();
}
/**
 * Creates a bare identifier node by parsing a dummy global assignment and
 * extracting the assigned symbol.
 *
 * @param name the identifier text
 * @return the identifier node, or {@code null} if the dummy source did not
 *         parse into an assignment
 */
@Override
public LuaIdentifier createIdentifier(String name) {
    final LuaPsiFile dummy = createDummyFile(name + "=true");
    // Single-slot holder so the anonymous visitor can report its result.
    final LuaIdentifier[] result = new LuaIdentifier[1];
    dummy.accept(new LuaElementVisitor() {
        @Override
        public void visitAssignment(@NotNull LuaAssignmentStatement e) {
            result[0] = (LuaIdentifier) e.getAssignments()[0].getSymbol();
        }
    });
    return result[0];
}
/**
 * Creates a Lua PSI file from the given text, optionally attaching a
 * resolve context.
 *
 * @param text       the file source text
 * @param isPhysical whether the created file is physical
 * @param context    optional PSI context to attach to the file; ignored when null
 * @return the newly created file
 */
@NotNull
public LuaPsiFile createLuaFile(@NotNull CharSequence text, boolean isPhysical, @Nullable PsiElement context) {
    final LuaPsiFile created = createDummyFile(text, isPhysical);
    if (context == null) {
        return created;
    }
    created.setContext(context);
    return created;
}
}
| |
/*
* Copyright to the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rioproject.impl.client;
import com.sun.jini.lookup.entry.LookupAttributes;
import net.jini.admin.Administrable;
import net.jini.admin.JoinAdmin;
import net.jini.config.Configuration;
import net.jini.config.ConfigurationException;
import net.jini.config.EmptyConfiguration;
import net.jini.core.discovery.LookupLocator;
import net.jini.core.entry.Entry;
import net.jini.core.lookup.ServiceID;
import net.jini.core.lookup.ServiceItem;
import net.jini.core.lookup.ServiceTemplate;
import net.jini.discovery.DiscoveryManagement;
import net.jini.discovery.LookupDiscovery;
import net.jini.discovery.LookupDiscoveryManager;
import net.jini.id.Uuid;
import net.jini.lease.LeaseRenewalManager;
import net.jini.lookup.*;
import net.jini.lookup.entry.Name;
import org.rioproject.deploy.ServiceBeanInstance;
import org.rioproject.deploy.ServiceRecord;
import org.rioproject.impl.container.ServiceBeanContainer;
import org.rioproject.impl.container.ServiceBeanContainerListener;
import org.rioproject.impl.container.ServiceBeanDelegate;
import org.rioproject.impl.opstring.OpStringFilter;
import org.rioproject.opstring.ClassBundle;
import org.rioproject.servicecore.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.rmi.RemoteException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
/**
* The LookupCachePool class provides the support to get an existing
* LookupCache from a pool of created LookupCache instances. Criteria for
* determining LookupCache matching is based on ServiceTemplate matching
*
* @author Dennis Reedy
*/
public class LookupCachePool {
    /* Pool of SDMWrapper instances; all access is synchronized on the list itself. */
    private final List<SDMWrapper> pool = new ArrayList<>();
    /* Created lazily on the first setServiceBeanContainer() call; also serves as a
     * "container already registered" flag so only one listener is ever attached. */
    private ServiceBeanContainerListener containerListener;
    private static final String COMPONENT = LookupCachePool.class.getName();
    private static final Logger logger = LoggerFactory.getLogger(COMPONENT);
    /* Configuration used when creating ServiceDiscoveryManager instances; may be
     * null until setConfiguration() is called (EmptyConfiguration is used then). */
    private static Configuration config;
    private static LookupCachePool singleton = new LookupCachePool();
    private LookupCachePool() {
        logger.debug("Create new LookupCachePool");
    }
    /**
     * Get the singleton instance of the LookupCachePool
     *
     * @return An instance of the LookupCachePool. Note: after {@link #terminate()}
     * has been invoked this returns {@code null}, since terminate() nulls the
     * singleton reference.
     */
    public static synchronized LookupCachePool getInstance() {
        return singleton;
    }
    /**
     * Set the {@link org.rioproject.impl.container.ServiceBeanContainer}
     *
     * @param container The <tt>ServiceBeanContainer</tt> used for local
     * discovery
     */
    public void setServiceBeanContainer(final ServiceBeanContainer container) {
        /* Register at most one listener regardless of how often this is called */
        if(container !=null && containerListener==null) {
            containerListener = new ContainerListener(container);
        }
    }
    /**
     * Set the Configuration property
     *
     * @param conf The Configuration to use when creating
     * ServiceDiscoveryManager instances
     */
    public void setConfiguration(final Configuration conf) {
        config = conf;
        logger.debug("Set configuration for LookupCachePool {}", config);
    }
    /**
     * This method will return an instance of LookupCache based on matching the
     * DiscoveryManagement instance and ServiceTemplate provided as
     * criteria. If there is an existing LookupCache instance created by a
     * ServiceDiscoveryManager instance this utility has created that matches the
     * supplied criteria, that instance will be returned.
     *
     * <p>If the LookupCache can not be found due to not being able to match discovery
     * criteria to a known ServiceDiscoveryManager instance a new
     * ServiceDiscoveryManager instance will be created, then a LookupCache instance
     * created and returned.
     *
     * <p>If a ServiceDiscoveryManager can be matched, but not a LookupCache, a new
     * LookupCache will be created using the matched ServiceDiscoveryManager
     *
     * @param dMgr A DiscoveryManager instance created by the DiscoveryManagementPool
     * @param template ServiceTemplate to match
     * @return A ServiceDiscoveryManager object based on the provided parameters or
     * null if the DiscoveryManagement instance was not created by the
     * DiscoveryManagementPool
     *
     * @throws IOException If discovery management cannot be created
     */
    public LookupCache getLookupCache(final DiscoveryManagement dMgr, final ServiceTemplate template) throws IOException {
        /* Only SharedDiscoveryManager instances carry the sharedName/groups/locators
         * needed to locate (or create) a matching SDMWrapper */
        if(!(dMgr instanceof DiscoveryManagementPool.SharedDiscoveryManager)) {
            logger.warn("The DiscoveryManagement instance passed was not created by the {}, returning null",
                        DiscoveryManagementPool.class.getName());
            return(null);
        }
        DiscoveryManagementPool.SharedDiscoveryManager sharedDM = (DiscoveryManagementPool.SharedDiscoveryManager)dMgr;
        return getLookupCache(sharedDM.getSharedName(), sharedDM.getGroups(), sharedDM.getLocators(), template);
    }
    /**
     * This method will return an instance of LookupCache based on matching the
     * shared name, shared discovery name, groups, locators and ServiceTemplate
     * provided as criteria. If there is an existing LookupCache instance created
     * by a ServiceDiscoveryManager instance this utility has created that matches
     * the supplied criteria, that instance will be returned.
     *
     * <p>If a LookupCache can not be found due to not being able to match discovery
     * criteria, a new ServiceDiscoveryManager instance will be created using the
     * provided discovery criteria, and a LookupCache instance created and returned.
     *
     * <p>If a ServiceDiscoveryManager can be matched, but not a LookupCache, a new
     * LookupCache will be created using the matched ServiceDiscoveryManager
     *
     * @param sharedName The name the LookupCache instances are shared across
     * @param groups An array of String objects indicating the Jini Lookup
     * Service groups to discover
     * @param locators An array of LookupLocator objects indicating specific
     * Jini Lookup Service instances to discover
     * @param template ServiceTemplate to match
     * @return A ServiceDiscoveryManager object based on the provided parameters
     *
     * @throws IOException If discovery management cannot be created
     */
    public LookupCache getLookupCache(final String sharedName,
                                      final String[] groups,
                                      final LookupLocator[] locators,
                                      final ServiceTemplate template) throws IOException {
        if(template==null)
            throw new IllegalArgumentException("template is null");
        SDMWrapper sdmWrapper;
        try {
            sdmWrapper = getSDMWrapper(sharedName, groups, locators);
        } catch(ConfigurationException e) {
            /* Wrap as IOException to keep the declared contract of this method */
            throw new IOException("Configuration problem creating a SDMWrapper", e);
        }
        return sdmWrapper.getLookupCache(template, true);
    }
    /**
     * For all ServiceDiscoveryManager instances this utility has created, terminate
     * them and set the singleton instance to null;
     */
    public void terminate() {
        SDMWrapper[] sdms = getSDMWrappers();
        for(SDMWrapper sdmWrapper : sdms) {
            sdmWrapper.sdm.terminate();
        }
        pool.clear();
        /* NOTE: after this, getInstance() returns null */
        singleton = null;
    }
    /* Snapshot the pool contents under the pool lock */
    private SDMWrapper[] getSDMWrappers() {
        SDMWrapper[] sdms;
        synchronized(pool) {
            sdms = pool.toArray(new SDMWrapper[0]);
        }
        return sdms;
    }
    /*
     * Get an SDMWrapper which matches the sharedName, discovery criteria, or
     * create one if not found
     */
    private SDMWrapper getSDMWrapper(final String sharedName,
                                     final String[] groupsToMatch,
                                     final LookupLocator[] locatorsToMatch) throws IOException, ConfigurationException {
        SDMWrapper sdmWrapper = null;
        SDMWrapper[] sdms = getSDMWrappers();
        /* All three criteria (name, groups, locators) must match */
        for(SDMWrapper wrapper : sdms) {
            if(wrapper.namesMatch(sharedName) &&
               wrapper.groupsMatch(groupsToMatch) &&
               wrapper.locatorsMatch(locatorsToMatch)) {
                sdmWrapper = wrapper;
                break;
            }
        }
        if(sdmWrapper==null) {
            /* Fall back to an empty configuration if none has been set */
            config = (config==null?EmptyConfiguration.INSTANCE:config);
            ServiceDiscoveryManager sdm =
                new ServiceDiscoveryManager(DiscoveryManagementPool.getInstance().getDiscoveryManager(sharedName,
                                                                                                      groupsToMatch,
                                                                                                      locatorsToMatch),
                                            new LeaseRenewalManager(config),
                                            config);
            sdmWrapper = new SDMWrapper(sharedName, sdm, groupsToMatch, locatorsToMatch);
            synchronized(pool) {
                pool.add(sdmWrapper);
            }
        }
        return sdmWrapper;
    }
    /**
     * The SDMWrapper class provides a wrapper around a known ServiceDiscoveryManager,
     * the lookupCache instances that this utility has created using the
     * ServiceDiscoveryManager and checks to see if the referenced
     * ServiceDiscoveryManager matches specified criteria, or if any known
     * LookupCache instances match criteria
     */
    class SDMWrapper {
        final String sharedName;
        final ServiceDiscoveryManager sdm;
        /* Defensive copies of the discovery criteria this SDM was created with */
        String[] groups;
        final LookupLocator[] locators;
        /* LookupCaches created from this SDM, keyed by the template used to create them */
        final Hashtable<ServiceTemplate, SharedLookupCache> cacheTable = new Hashtable<>();
        SDMWrapper(final String sharedName, final ServiceDiscoveryManager sdm, final String[] groups, final LookupLocator[] locators) {
            this.sharedName = sharedName;
            this.sdm = sdm;
            if(groups!=null) {
                this.groups = new String[groups.length];
                System.arraycopy(groups, 0, this.groups, 0, this.groups.length);
            }
            this.locators = new LookupLocator[locators.length];
            System.arraycopy(locators, 0, this.locators, 0, this.locators.length);
        }
        /*
         * Remove a cache from the cacheTable; if this leaves no caches, terminate
         * the wrapped SDM and remove this wrapper from the pool
         */
        void removeCache(final SharedLookupCache lCache) {
            ServiceTemplate templateToMatch = lCache.getServiceTemplate();
            for(Enumeration<ServiceTemplate> en=cacheTable.keys(); en.hasMoreElements();) {
                ServiceTemplate template = en.nextElement();
                if(templatesMatch(template, templateToMatch)) {
                    /* templateToMatch is the same instance used as the key at put-time,
                     * so removing by it hits the matched entry */
                    cacheTable.remove(templateToMatch);
                    break;
                }
            }
            logger.trace("removeCache(), cacheTable.size()=={}",cacheTable.size());
            if(cacheTable.size()==0) {
                try {
                    sdm.terminate();
                } catch (IllegalStateException e) {
                    /* Already terminated; best-effort cleanup */
                    logger.trace("Terminating SDM", e);
                }
                synchronized(pool) {
                    pool.remove(this);
                }
            }
        }
        /*
         * See if the sharedNames match
         *
         * @param name
         *
         * @return true if they do, false otherwise
         */
        boolean namesMatch(final String name) {
            if(sharedName==null && name==null)
                return true;
            if(sharedName==null)
                return false;
            if(name == null)
                return (false);
            return(sharedName.equals(name));
        }
        /**
         * See if the groups provided match the groups for the referenced SDM
         *
         * @param groupsToMatch The groups
         *
         * @return true if they do, false otherwise
         */
        boolean groupsMatch(final String[] groupsToMatch) {
            /* If both are set to ALL_GROUPS we have a match */
            if(groupsToMatch == LookupDiscovery.ALL_GROUPS &&
               groups == LookupDiscovery.ALL_GROUPS) {
                return true;
            }
            /* If one or the other is set to ALL_GROUPS return false */
            if(groupsToMatch == LookupDiscovery.ALL_GROUPS ||
               groups == LookupDiscovery.ALL_GROUPS)
                return false;
            /* If both have the same "set", check for equivalence.
             * NOTE: comparison is positional (element i vs element i) */
            if(groupsToMatch.length == groups.length) {
                int matches=0;
                for(int i=0; i<groupsToMatch.length; i++) {
                    if(groupsToMatch[i].equals(groups[i]))
                        matches++;
                }
                return matches == groupsToMatch.length;
            }
            return false;
        }
        /**
         * See if the locators provided match the locators for the referenced SDM
         *
         * @param locatorsToMatch The locators
         *
         * @return true if they do. false otherwise
         */
        boolean locatorsMatch(final LookupLocator[] locatorsToMatch) {
            boolean matched=false;
            if(locatorsToMatch == null && locators == null)
                return true;
            /* Treat "no locators requested" the same as an empty locator set */
            if(locatorsToMatch == null && locators.length==0)
                return true;
            if(locatorsToMatch!=null && locators!=null) {
                if(locatorsToMatch.length == locators.length) {
                    int matches=0;
                    /* Positional comparison, same as groupsMatch */
                    for(int i=0; i<locatorsToMatch.length; i++) {
                        if(locatorsToMatch[i].equals(locators[i]))
                            matches++;
                    }
                    if(matches==locatorsToMatch.length)
                        matched=true;
                }
            }
            return(matched);
        }
        /**
         * Get a LookupCache from the cacheTable for the provided ServiceTemplate.
         *
         * @param templateToMatch The template
         * @param create If true and a LookupCache does not exist, create one
         *
         * @return A SharedLookupCache for the ServiceTemplate, or {@code null}
         * if none exists and {@code create} is false
         *
         * @throws IOException If a LookupCache cannot be created
         */
        SharedLookupCache getLookupCache(final ServiceTemplate templateToMatch, final boolean create) throws IOException {
            SharedLookupCache lCache = null;
            for(Enumeration<?> en = cacheTable.keys(); en.hasMoreElements();) {
                ServiceTemplate template = (ServiceTemplate)en.nextElement();
                if(templatesMatch(template, templateToMatch)) {
                    lCache = cacheTable.get(template);
                    break;
                }
            }
            if(lCache==null && create) {
                /* When a sharedName is set, filter out services not part of that opstring */
                ServiceItemFilter filter = sharedName==null ? null : new OpStringFilter(sharedName);
                LookupCache lc = sdm.createLookupCache(templateToMatch, filter, null);
                lCache = new SharedLookupCache(lc, templateToMatch, this);
                lCache.setServiceItemFilter(filter);
                cacheTable.put(templateToMatch, lCache);
            }
            return(lCache);
        }
        /**
         * Determine of the ServiceTemplate instances match each other
         *
         * @param st1 ServiceTemplate instance 1
         * @param st2 ServiceTemplate instance 2
         *
         * @return true if they match
         */
        boolean templatesMatch(final ServiceTemplate st1, final ServiceTemplate st2) {
            return attributesMatch(st1.attributeSetTemplates, st2.attributeSetTemplates) &&
                   serviceIDsMatch(st1.serviceID, st2.serviceID) &&
                   serviceTypesMatch(st1.serviceTypes, st2.serviceTypes);
        }
    }
    /*
     * Check if attributes match (null matches null only)
     */
    boolean attributesMatch(final Entry[] attr1, final Entry[] attr2) {
        if(attr1==null && attr2==null)
            return true;
        if(attr1==null || attr2==null)
            return false;
        return(LookupAttributes.equal(attr1, attr2));
    }
    /*
     * Check if service ID match (null matches null only)
     */
    boolean serviceIDsMatch(final ServiceID sid1, final ServiceID sid2) {
        if(sid1==null && sid2==null)
            return true;
        if(sid1==null || sid2==null)
            return false;
        return(sid1.equals(sid2));
    }
    /*
     * Check if service types match. Compared by class name, order-insensitive.
     * NOTE(review): duplicate class names could over-count matches — assumes
     * each array holds distinct types; verify with callers.
     */
    boolean serviceTypesMatch(final Class<?>[] types1, final Class<?>[] types2) {
        if(types1==null && types2==null)
            return true;
        if(types1==null || types2==null)
            return false;
        if(types1.length == types2.length) {
            int matches = 0;
            for (Class<?> c1 : types1) {
                for (Class<?> c2 : types2) {
                    if (c1.getName().equals(c2.getName()))
                        matches++;
                }
            }
            return matches == types1.length;
        }
        return false;
    }
    /**
     * The SharedLookupCache implements a LookupCache and delegates all method
     * invocations to it's LookupDiscoveryManager, and maintains a
     * reference counter for how many clients are sharing the instance. The reference
     * counter is used to determine if the LookupDiscoveryManager should be
     * terminated. The reference counter is incremented each time a listener is
     * added, and decremented each time a listener is removed. If the
     * reference counter goes to zero upon termination the LookupDiscoveryManager
     * will be terminated
     */
    public class SharedLookupCache implements LookupCache {
        private final LookupCache lCache;
        private final ServiceTemplate template;
        /* Number of registered listeners currently sharing this cache */
        private final AtomicInteger refCounter = new AtomicInteger();
        private final SDMWrapper sdmWrapper;
        private boolean terminated = false;
        private ServiceItemFilter filter;
        /* Listeners also tracked locally so locally-instantiated services can be announced */
        private final List<ServiceDiscoveryListener> localListeners = new ArrayList<>();
        public SharedLookupCache(final LookupCache lCache,
                                 final ServiceTemplate template,
                                 final SDMWrapper sdmWrapper) {
            this.lCache = lCache;
            this.template = template;
            this.sdmWrapper = sdmWrapper;
        }
        void setServiceItemFilter(final ServiceItemFilter filter) {
            this.filter = filter;
        }
        /*
         * Get the ServiceTemplate
         */
        private ServiceTemplate getServiceTemplate() {
            return(template);
        }
        /* (non-Javadoc)
         * @see net.jini.lookup.LookupCache#lookup(ServiceItemFilter)
         */
        public ServiceItem lookup(final ServiceItemFilter filter) {
            return(lCache.lookup(filter));
        }
        /* (non-Javadoc)
         * @see LookupCache#lookup(ServiceItemFilter, int)
         */
        public ServiceItem[] lookup(final ServiceItemFilter filter, final int maxMatches) {
            return lCache.lookup(filter, maxMatches);
        }
        /* (non-Javadoc)
         * @see ServiceDiscoveryManager#lookup(ServiceTemplate, int, ServiceItemFilter)
         */
        public ServiceItem[] lookupRemote(final ServiceItemFilter filter, final int maxMatches) {
            return sdmWrapper.sdm.lookup(template, maxMatches, filter);
        }
        /* (non-Javadoc)
         * @see LookupCache#addListener(ServiceDiscoveryListener)
         */
        public synchronized void addListener(final ServiceDiscoveryListener listener) {
            refCounter.incrementAndGet();
            logger.trace("Added LookupCache Listener for template [{}], refCounter: {}",
                         getServiceTemplateAsString(), refCounter.get());
            synchronized(localListeners) {
                localListeners.add(listener);
            }
            lCache.addListener(listener);
        }
        /* (non-Javadoc)
         * @see LookupCache#removeListener(ServiceDiscoveryListener)
         */
        public synchronized void removeListener(final ServiceDiscoveryListener listener) {
            if(!terminated) {
                synchronized(localListeners) {
                    localListeners.remove(listener);
                }
                lCache.removeListener(listener);
                refCounter.decrementAndGet();
            }
            logger.trace("Removed LookupCache Listener for template [{}], refCounter: {}",
                         getServiceTemplateAsString(), refCounter.get());
            logger.trace("lCache={} refCounter={}", lCache.toString(), refCounter.get());
            /* Last listener gone: tear down the underlying cache */
            if(refCounter.get()==0) {
                terminate();
            }
        }
        /* (non-Javadoc)
         * @see LookupCache#discard(Object)
         */
        public void discard(final Object o) {
            logger.trace("Discard {} from LookupCache", o.getClass().getName());
            try {
                lCache.discard(o);
            } catch(IllegalStateException e) {
                /* Cache may already be terminated; log and continue */
                logger.warn("Could not discard {}, {}", o.getClass().getName(), e.getMessage());
            }
        }
        /* (non-Javadoc)
         * @see net.jini.lookup.LookupCache#terminate()
         */
        public synchronized void terminate() {
            /* Only terminate when no listeners remain */
            if(refCounter.get()==0) {
                logger.trace("Terminating LookupCache for template [{}], refCounter: {}",
                             getServiceTemplateAsString(), refCounter.get());
                terminated = true;
                lCache.terminate();
                sdmWrapper.removeCache(this);
            }
        }
        /* Render the template's service types and attributes for log messages */
        String getServiceTemplateAsString() {
            StringBuilder sb1 = new StringBuilder();
            StringBuilder sb2 = new StringBuilder();
            if(template.serviceTypes!=null) {
                for(Class<?> c : template.serviceTypes) {
                    if(sb2.length()>0)
                        sb2.append(", ");
                    sb2.append(c.getName());
                }
            }
            sb1.append("types: [").append(sb2.toString()).append("] ");
            sb2.delete(0, sb2.length());
            if(template.attributeSetTemplates!=null) {
                for(Entry e : template.attributeSetTemplates) {
                    if(sb2.length()>0)
                        sb2.append(", ");
                    if(e instanceof Name)
                        sb2.append("Name: ").append(((Name)e).name);
                    else
                        sb2.append(e.getClass().getName());
                }
            }
            sb1.append("attributes: [").append(sb2.toString()).append("] ");
            return sb1.toString();
        }
        /*
         * Notify local listeners that a service was instantiated in the local
         * container, applying the cache's ServiceItemFilter first (if any)
         */
        void notifyOnLocalAdd(final ServiceItem item) {
            boolean cleared = true;
            if(filter!=null) {
                cleared = filter.check(item);
            }
            if(cleared) {
                ServiceDiscoveryListener[] listeners;
                synchronized(localListeners) {
                    listeners = localListeners.toArray(new ServiceDiscoveryListener[0]);
                }
                for(ServiceDiscoveryListener l : listeners) {
                    l.serviceAdded(new ServiceDiscoveryEvent(this, null, item));
                }
            }
        }
    }
    /*
     * Listens to the local ServiceBeanContainer and announces locally
     * instantiated services to any matching SharedLookupCache, so local clients
     * don't have to wait for remote discovery
     */
    class ContainerListener implements ServiceBeanContainerListener {
        private final ServiceBeanContainer container;
        /* ServiceRecords already announced, to avoid duplicate notifications */
        private final List<ServiceRecord> notified = new ArrayList<>();
        ContainerListener(final ServiceBeanContainer container) {
            this.container = container;
            container.addListener(this);
        }
        private ServiceBeanInstance getServiceBeanInstance(final ServiceRecord r) {
            ServiceBeanDelegate delegate = container.getServiceBeanDelegate(r.getServiceID());
            return delegate.getServiceBeanInstance();
        }
        /* Recursively collect all interfaces implemented by the class and its supers */
        List<Class<?>> getAllInterfaces(Class<?> classObject) {
            List<Class<?>> list = new ArrayList<>();
            if(classObject.getName().equals(Object.class.getName()))
                return list;
            list.addAll(getAllInterfaces(classObject.getSuperclass()));
            Collections.addAll(list, classObject.getInterfaces());
            return list;
        }
        public void serviceInstantiated(final ServiceRecord record) {
            try {
                ServiceBeanInstance instance = getServiceBeanInstance(record);
                Class<?> proxyClass = instance.getService().getClass();
                /* Find the proxy interface that matches one of the service's export bundles */
                Class<?> theInterfaceClass = null;
                for(Class<?> interfaceClass : getAllInterfaces(proxyClass)) {
                    for(ClassBundle cb : record.getServiceElement().getExportBundles()) {
                        if(interfaceClass.getName().equals(cb.getClassName())) {
                            theInterfaceClass = interfaceClass;
                            break;
                        }
                    }
                    if(theInterfaceClass!=null)
                        break;
                }
                if(theInterfaceClass==null) {
                    logger.warn("No matching interface class found for {}, defaulting to {}",
                                record.getServiceElement().getName(), Service.class.getName());
                    theInterfaceClass = Service.class;
                }
                logger.trace("[{}] selected: {}", record.getServiceElement().getName(), theInterfaceClass.getName());
                ServiceTemplate templateToMatch = JiniClient.getServiceTemplate(record.getServiceElement(),
                                                                                theInterfaceClass);
                /* Announce to every cache that matches the template, at most once per record */
                SDMWrapper[] sdms = getSDMWrappers();
                for(SDMWrapper sdm : sdms) {
                    SharedLookupCache lCache = sdm.getLookupCache(templateToMatch, false);
                    if(lCache!=null) {
                        boolean alreadyNotified;
                        synchronized(notified) {
                            alreadyNotified = notified.contains(record);
                        }
                        if(!alreadyNotified) {
                            logger.trace("Notify listeners of local instantiation of {} {}",
                                         record.getServiceElement().getName(), theInterfaceClass.getName());
                            lCache.notifyOnLocalAdd(makeServiceItem(instance));
                            synchronized(notified) {
                                notified.add(record);
                            }
                        }
                    }
                }
            } catch (Exception e) {
                logger.warn("Unable to load service interface", e);
            }
        }
        public void serviceDiscarded(final ServiceRecord record) {
            synchronized(notified) {
                notified.remove(record);
            }
        }
        /* Build a Jini ServiceItem from a local ServiceBeanInstance, pulling
         * lookup attributes from the service's JoinAdmin when available */
        private ServiceItem makeServiceItem(final ServiceBeanInstance instance) throws IOException,
                                                                                       ClassNotFoundException {
            Uuid uuid = instance.getServiceBeanID();
            ServiceID serviceID = new ServiceID(uuid.getMostSignificantBits(), uuid.getLeastSignificantBits());
            Entry[] attrs = null;
            Object service = instance.getService();
            if(service instanceof Administrable) {
                try {
                    Object admin = ((Administrable)service).getAdmin();
                    if(admin instanceof JoinAdmin) {
                        attrs = ((JoinAdmin)admin).getLookupAttributes();
                    }
                } catch(RemoteException e) {
                    /* Best-effort: a remote failure just means no attributes */
                    logger.warn("Getting attributes from [{}]", instance.getServiceBeanConfig().getName(), e);
                }
            }
            return(new ServiceItem(serviceID, service, attrs));
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.realtime.appenderator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterables;
import org.apache.druid.client.CachingQueryRunner;
import org.apache.druid.client.cache.Cache;
import org.apache.druid.client.cache.CacheConfig;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.client.cache.ForegroundCachePopulator;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.guava.CloseQuietly;
import org.apache.druid.java.util.common.guava.FunctionalIterable;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.BySegmentQueryRunner;
import org.apache.druid.query.CPUTimeMetricQueryRunner;
import org.apache.druid.query.MetricsEmittingQueryRunner;
import org.apache.druid.query.NoopQueryRunner;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryMetrics;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryRunnerHelper;
import org.apache.druid.query.QuerySegmentWalker;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.spec.SpecificSegmentQueryRunner;
import org.apache.druid.query.spec.SpecificSegmentSpec;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.realtime.FireHydrant;
import org.apache.druid.segment.realtime.plumber.Sink;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.joda.time.Interval;
import java.io.Closeable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicLong;
public class SinkQuerySegmentWalker implements QuerySegmentWalker
{
private static final EmittingLogger log = new EmittingLogger(SinkQuerySegmentWalker.class);
private static final String CONTEXT_SKIP_INCREMENTAL_SEGMENT = "skipIncrementalSegment";
private final String dataSource;
private final VersionedIntervalTimeline<String, Sink> sinkTimeline;
private final ObjectMapper objectMapper;
private final ServiceEmitter emitter;
private final QueryRunnerFactoryConglomerate conglomerate;
private final ExecutorService queryExecutorService;
private final Cache cache;
private final CacheConfig cacheConfig;
private final CachePopulatorStats cachePopulatorStats;
public SinkQuerySegmentWalker(
String dataSource,
VersionedIntervalTimeline<String, Sink> sinkTimeline,
ObjectMapper objectMapper,
ServiceEmitter emitter,
QueryRunnerFactoryConglomerate conglomerate,
ExecutorService queryExecutorService,
Cache cache,
CacheConfig cacheConfig,
CachePopulatorStats cachePopulatorStats
)
{
this.dataSource = Preconditions.checkNotNull(dataSource, "dataSource");
this.sinkTimeline = Preconditions.checkNotNull(sinkTimeline, "sinkTimeline");
this.objectMapper = Preconditions.checkNotNull(objectMapper, "objectMapper");
this.emitter = Preconditions.checkNotNull(emitter, "emitter");
this.conglomerate = Preconditions.checkNotNull(conglomerate, "conglomerate");
this.queryExecutorService = Preconditions.checkNotNull(queryExecutorService, "queryExecutorService");
this.cache = Preconditions.checkNotNull(cache, "cache");
this.cacheConfig = Preconditions.checkNotNull(cacheConfig, "cacheConfig");
this.cachePopulatorStats = Preconditions.checkNotNull(cachePopulatorStats, "cachePopulatorStats");
if (!cache.isLocal()) {
log.warn("Configured cache[%s] is not local, caching will not be enabled.", cache.getClass().getName());
}
}
@Override
public <T> QueryRunner<T> getQueryRunnerForIntervals(final Query<T> query, final Iterable<Interval> intervals)
{
final Iterable<SegmentDescriptor> specs = FunctionalIterable
.create(intervals)
.transformCat(
new Function<Interval, Iterable<TimelineObjectHolder<String, Sink>>>()
{
@Override
public Iterable<TimelineObjectHolder<String, Sink>> apply(final Interval interval)
{
return sinkTimeline.lookup(interval);
}
}
)
.transformCat(
new Function<TimelineObjectHolder<String, Sink>, Iterable<SegmentDescriptor>>()
{
@Override
public Iterable<SegmentDescriptor> apply(final TimelineObjectHolder<String, Sink> holder)
{
return FunctionalIterable
.create(holder.getObject())
.transform(
new Function<PartitionChunk<Sink>, SegmentDescriptor>()
{
@Override
public SegmentDescriptor apply(final PartitionChunk<Sink> chunk)
{
return new SegmentDescriptor(
holder.getInterval(),
holder.getVersion(),
chunk.getChunkNumber()
);
}
}
);
}
}
);
return getQueryRunnerForSegments(query, specs);
}
@Override
public <T> QueryRunner<T> getQueryRunnerForSegments(final Query<T> query, final Iterable<SegmentDescriptor> specs)
{
// We only handle one particular dataSource. Make sure that's what we have, then ignore from here on out.
if (!(query.getDataSource() instanceof TableDataSource)
|| !dataSource.equals(((TableDataSource) query.getDataSource()).getName())) {
log.makeAlert("Received query for unknown dataSource")
.addData("dataSource", query.getDataSource())
.emit();
return new NoopQueryRunner<>();
}
final QueryRunnerFactory<T, Query<T>> factory = conglomerate.findFactory(query);
if (factory == null) {
throw new ISE("Unknown query type[%s].", query.getClass());
}
final QueryToolChest<T, Query<T>> toolChest = factory.getToolchest();
final boolean skipIncrementalSegment = query.getContextValue(CONTEXT_SKIP_INCREMENTAL_SEGMENT, false);
final AtomicLong cpuTimeAccumulator = new AtomicLong(0L);
return CPUTimeMetricQueryRunner.safeBuild(
toolChest.mergeResults(
factory.mergeRunners(
queryExecutorService,
FunctionalIterable
.create(specs)
.transform(
new Function<SegmentDescriptor, QueryRunner<T>>()
{
@Override
public QueryRunner<T> apply(final SegmentDescriptor descriptor)
{
final PartitionHolder<Sink> holder = sinkTimeline.findEntry(
descriptor.getInterval(),
descriptor.getVersion()
);
if (holder == null) {
return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
}
final PartitionChunk<Sink> chunk = holder.getChunk(descriptor.getPartitionNumber());
if (chunk == null) {
return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
}
final Sink theSink = chunk.getObject();
final SegmentId sinkSegmentId = theSink.getSegment().getId();
return new SpecificSegmentQueryRunner<>(
withPerSinkMetrics(
new BySegmentQueryRunner<>(
sinkSegmentId,
descriptor.getInterval().getStart(),
factory.mergeRunners(
Execs.directExecutor(),
Iterables.transform(
theSink,
new Function<FireHydrant, QueryRunner<T>>()
{
@Override
public QueryRunner<T> apply(final FireHydrant hydrant)
{
// Hydrant might swap at any point, but if it's swapped at the start
// then we know it's *definitely* swapped.
final boolean hydrantDefinitelySwapped = hydrant.hasSwapped();
if (skipIncrementalSegment && !hydrantDefinitelySwapped) {
return new NoopQueryRunner<>();
}
// Prevent the underlying segment from swapping when its being iterated
final Pair<Segment, Closeable> segment = hydrant.getAndIncrementSegment();
try {
QueryRunner<T> baseRunner = QueryRunnerHelper.makeClosingQueryRunner(
factory.createRunner(segment.lhs),
segment.rhs
);
// 1) Only use caching if data is immutable
// 2) Hydrants are not the same between replicas, make sure cache is local
if (hydrantDefinitelySwapped && cache.isLocal()) {
return new CachingQueryRunner<>(
makeHydrantCacheIdentifier(hydrant),
descriptor,
objectMapper,
cache,
toolChest,
baseRunner,
// Always populate in foreground regardless of config
new ForegroundCachePopulator(
objectMapper,
cachePopulatorStats,
cacheConfig.getMaxEntrySize()
),
cacheConfig
);
} else {
return baseRunner;
}
}
catch (RuntimeException e) {
CloseQuietly.close(segment.rhs);
throw e;
}
}
}
)
)
),
toolChest,
sinkSegmentId,
cpuTimeAccumulator
),
new SpecificSegmentSpec(descriptor)
);
}
}
)
)
),
toolChest,
emitter,
cpuTimeAccumulator,
true
);
}
/**
 * Decorates a Sink's query runner so that query/segmentAndCache/time, query/segment/time and
 * query/wait/time are each emitted exactly once for the whole Sink, and so that CPU time spent
 * inside the runner is added to {@code cpuTimeAccumulator}.
 *
 * @param sinkRunner         runner covering all hydrants of the Sink
 * @param queryToolChest     toolchest handed to the metric-emitting wrappers
 * @param sinkSegmentId      segment id reported with the per-segment metrics
 * @param cpuTimeAccumulator shared accumulator for CPU-time reporting
 * @return the decorated runner
 */
private <T> QueryRunner<T> withPerSinkMetrics(
    final QueryRunner<T> sinkRunner,
    final QueryToolChest<T, ? extends Query<T>> queryToolChest,
    final SegmentId sinkSegmentId,
    final AtomicLong cpuTimeAccumulator
)
{
  // Note: reportSegmentAndCacheTime and reportSegmentTime are effectively the same here. They
  // don't split apart cache vs. non-cache due to the fact that Sinks may be partially cached and
  // partially uncached. Making this better would need to involve another accumulator like the
  // cpuTimeAccumulator that we could share with the sinkRunner.
  final String segmentIdForMetrics = sinkSegmentId.toString();

  // Innermost wrapper: emits query/segment/time for the whole Sink.
  final QueryRunner<T> segmentTimeRunner = new MetricsEmittingQueryRunner<>(
      emitter,
      queryToolChest,
      sinkRunner,
      QueryMetrics::reportSegmentTime,
      queryMetrics -> queryMetrics.segment(segmentIdForMetrics)
  );

  // Next wrapper: emits query/segmentAndCache/time (same span as above; see note).
  final MetricsEmittingQueryRunner<T> segmentAndCacheTimeRunner = new MetricsEmittingQueryRunner<>(
      emitter,
      queryToolChest,
      segmentTimeRunner,
      QueryMetrics::reportSegmentAndCacheTime,
      queryMetrics -> queryMetrics.segment(segmentIdForMetrics)
  );

  // Outermost wrapper: wait-time measured from now, plus CPU-time accumulation
  // (report=false: the accumulator is reported elsewhere).
  return CPUTimeMetricQueryRunner.safeBuild(
      segmentAndCacheTimeRunner.withWaitMeasuredFromNow(),
      queryToolChest,
      emitter,
      cpuTimeAccumulator,
      false
  );
}
/**
 * Builds the cache identifier for a hydrant: its segment id and its per-segment hydrant count
 * joined by an underscore, so each incremental persist of the same segment is cached separately.
 */
public static String makeHydrantCacheIdentifier(FireHydrant input)
{
  final StringBuilder identifier = new StringBuilder();
  identifier.append(input.getSegmentId()).append('_').append(input.getCount());
  return identifier.toString();
}
}
| |
package org.apache.lucene.analysis.tokenattributes;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.util.AttributeImpl;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.TestUtil;
import java.nio.CharBuffer;
import java.util.HashMap;
import java.util.Formatter;
import java.util.Locale;
import java.util.regex.Pattern;
/**
 * Tests for {@code CharTermAttributeImpl}: internal buffer growth, the {@link CharSequence} and
 * {@link Appendable} contracts, clone/copyTo semantics, equality, and attribute reflection.
 */
public class TestCharTermAttributeImpl extends LuceneTestCase {

  /** resizeBuffer must grow capacity on demand without corrupting the stored term. */
  public void testResize() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    char[] content = "hello".toCharArray();
    t.copyBuffer(content, 0, content.length);
    for (int i = 0; i < 2000; i++)
    {
      t.resizeBuffer(i);
      // Capacity must be at least what was requested...
      assertTrue(i <= t.buffer().length);
      // ...and the term text must be unaffected by the resize.
      assertEquals("hello", t.toString());
    }
  }

  /** Exercises fast (doubling) and slow (one char at a time) buffer growth. */
  public void testGrow() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    StringBuilder buf = new StringBuilder("ab");
    for (int i = 0; i < 20; i++)
    {
      char[] content = buf.toString().toCharArray();
      t.copyBuffer(content, 0, content.length);
      assertEquals(buf.length(), t.length());
      assertEquals(buf.toString(), t.toString());
      // Double the source each iteration: 2 chars doubled 19 times -> 2^20 chars.
      buf.append(buf.toString());
    }
    assertEquals(1048576, t.length());

    // now as a StringBuilder, first variant
    t = new CharTermAttributeImpl();
    buf = new StringBuilder("ab");
    for (int i = 0; i < 20; i++)
    {
      t.setEmpty().append(buf);
      assertEquals(buf.length(), t.length());
      assertEquals(buf.toString(), t.toString());
      buf.append(t);
    }
    assertEquals(1048576, t.length());

    // Test for slow growth to a long term
    t = new CharTermAttributeImpl();
    buf = new StringBuilder("a");
    for (int i = 0; i < 20000; i++)
    {
      t.setEmpty().append(buf);
      assertEquals(buf.length(), t.length());
      assertEquals(buf.toString(), t.toString());
      buf.append("a");
    }
    assertEquals(20000, t.length());
  }

  /** toString must reflect the current term, both after copyBuffer and after append. */
  public void testToString() throws Exception {
    char[] b = {'a', 'l', 'o', 'h', 'a'};
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.copyBuffer(b, 0, 5);
    assertEquals("aloha", t.toString());
    t.setEmpty().append("hi there");
    assertEquals("hi there", t.toString());
  }

  /** clone() must deep-copy the character buffer, not share it with the original. */
  public void testClone() throws Exception {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    char[] content = "hello".toCharArray();
    t.copyBuffer(content, 0, 5);
    char[] buf = t.buffer();
    CharTermAttributeImpl copy = assertCloneIsEqual(t);
    assertEquals(t.toString(), copy.toString());
    // The clone must own a distinct buffer array.
    assertNotSame(buf, copy.buffer());
  }

  /** equals() compares term content: same text equal, different text not. */
  public void testEquals() throws Exception {
    CharTermAttributeImpl t1a = new CharTermAttributeImpl();
    char[] content1a = "hello".toCharArray();
    t1a.copyBuffer(content1a, 0, 5);
    CharTermAttributeImpl t1b = new CharTermAttributeImpl();
    char[] content1b = "hello".toCharArray();
    t1b.copyBuffer(content1b, 0, 5);
    CharTermAttributeImpl t2 = new CharTermAttributeImpl();
    char[] content2 = "hello2".toCharArray();
    t2.copyBuffer(content2, 0, 6);
    assertTrue(t1a.equals(t1b));
    assertFalse(t1a.equals(t2));
    assertFalse(t2.equals(t1b));
  }

  /** copyTo must produce an equal attribute with its own (not shared) buffer. */
  public void testCopyTo() throws Exception {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    CharTermAttributeImpl copy = assertCopyIsEqual(t);
    // Copying an empty attribute yields an empty attribute.
    assertEquals("", t.toString());
    assertEquals("", copy.toString());
    t = new CharTermAttributeImpl();
    char[] content = "hello".toCharArray();
    t.copyBuffer(content, 0, 5);
    char[] buf = t.buffer();
    copy = assertCopyIsEqual(t);
    assertEquals(t.toString(), copy.toString());
    assertNotSame(buf, copy.buffer());
  }

  /** Reflection view must expose both the term text and its BytesRef form. */
  public void testAttributeReflection() throws Exception {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.append("foobar");
    TestUtil.assertAttributeReflection(t, new HashMap<String, Object>() {{
      put(CharTermAttribute.class.getName() + "#term", "foobar");
      put(TermToBytesRefAttribute.class.getName() + "#bytes", new BytesRef("foobar"));
    }});
  }

  /** CharSequence contract: length, charAt, subSequence, and use with regex Patterns. */
  public void testCharSequenceInterface() {
    final String s = "0123456789";
    final CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.append(s);
    assertEquals(s.length(), t.length());
    assertEquals("12", t.subSequence(1,3).toString());
    assertEquals(s, t.subSequence(0,s.length()).toString());
    assertTrue(Pattern.matches("01\\d+", t));
    assertTrue(Pattern.matches("34", t.subSequence(3,5)));
    assertEquals(s.subSequence(3,7).toString(), t.subSequence(3,7).toString());
    for (int i = 0; i < s.length(); i++) {
      assertTrue(t.charAt(i) == s.charAt(i));
    }
  }

  /**
   * Appendable contract: works as a Formatter target, appends chars, CharSequences (including
   * sub-ranges, CharBuffers, itself, and null), and enforces index bounds.
   */
  public void testAppendableInterface() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    Formatter formatter = new Formatter(t, Locale.ROOT);
    formatter.format("%d", 1234);
    assertEquals("1234", t.toString());
    formatter.format("%d", 5678);
    assertEquals("12345678", t.toString());
    t.append('9');
    assertEquals("123456789", t.toString());
    t.append((CharSequence) "0");
    assertEquals("1234567890", t.toString());
    t.append((CharSequence) "0123456789", 1, 3);
    assertEquals("123456789012", t.toString());
    t.append((CharSequence) CharBuffer.wrap("0123456789".toCharArray()), 3, 5);
    assertEquals("12345678901234", t.toString());
    // Appending the attribute to itself must work (self-append).
    t.append((CharSequence) t);
    assertEquals("1234567890123412345678901234", t.toString());
    t.append((CharSequence) new StringBuilder("0123456789"), 5, 7);
    assertEquals("123456789012341234567890123456", t.toString());
    t.append((CharSequence) new StringBuffer(t));
    assertEquals("123456789012341234567890123456123456789012341234567890123456", t.toString());
    // very weird, to test if a subSlice is wrapped correctly :)
    CharBuffer buf = CharBuffer.wrap("0123456789".toCharArray(), 3, 5);
    assertEquals("34567", buf.toString());
    t.setEmpty().append((CharSequence) buf, 1, 2);
    assertEquals("4", t.toString());
    CharTermAttribute t2 = new CharTermAttributeImpl();
    t2.append("test");
    t.append((CharSequence) t2);
    assertEquals("4test", t.toString());
    t.append((CharSequence) t2, 1, 2);
    assertEquals("4teste", t.toString());
    try {
      t.append((CharSequence) t2, 1, 5);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: end index past the source's length
    }
    try {
      t.append((CharSequence) t2, 1, 0);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: end index before start index
    }
    // Per the Appendable contract, a null CharSequence appends the string "null".
    t.append((CharSequence) null);
    assertEquals("4testenull", t.toString());
  }

  /** Same Appendable checks with sequences long enough to force internal growth. */
  public void testAppendableInterfaceWithLongSequences() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.append((CharSequence) "01234567890123456789012345678901234567890123456789");
    t.append((CharSequence) CharBuffer.wrap("01234567890123456789012345678901234567890123456789".toCharArray()), 3, 50);
    assertEquals("0123456789012345678901234567890123456789012345678934567890123456789012345678901234567890123456789", t.toString());
    t.setEmpty().append((CharSequence) new StringBuilder("01234567890123456789"), 5, 17);
    assertEquals((CharSequence) "567890123456", t.toString());
    t.append(new StringBuffer(t));
    assertEquals((CharSequence) "567890123456567890123456", t.toString());
    // very weird, to test if a subSlice is wrapped correctly :)
    CharBuffer buf = CharBuffer.wrap("012345678901234567890123456789".toCharArray(), 3, 15);
    assertEquals("345678901234567", buf.toString());
    t.setEmpty().append(buf, 1, 14);
    assertEquals("4567890123456", t.toString());
    // finally use a completely custom CharSequence that is not caught by instanceof checks
    final String longTestString = "012345678901234567890123456789";
    t.append(new CharSequence() {
      @Override
      public char charAt(int i) { return longTestString.charAt(i); }
      @Override
      public int length() { return longTestString.length(); }
      @Override
      public CharSequence subSequence(int start, int end) { return longTestString.subSequence(start, end); }
      @Override
      public String toString() { return longTestString; }
    });
    assertEquals("4567890123456"+longTestString, t.toString());
  }

  /** The typed append overloads (String, StringBuilder, CharTermAttribute) and their null handling. */
  public void testNonCharSequenceAppend() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.append("0123456789");
    t.append("0123456789");
    assertEquals("01234567890123456789", t.toString());
    t.append(new StringBuilder("0123456789"));
    assertEquals("012345678901234567890123456789", t.toString());
    CharTermAttribute t2 = new CharTermAttributeImpl();
    t2.append("test");
    t.append(t2);
    assertEquals("012345678901234567890123456789test", t.toString());
    // Each typed overload appends "null" for a null argument, matching Appendable semantics.
    t.append((String) null);
    t.append((StringBuilder) null);
    t.append((CharTermAttribute) null);
    assertEquals("012345678901234567890123456789testnullnullnull", t.toString());
  }

  /** Out-of-range charAt/subSequence indices must raise IndexOutOfBoundsException. */
  public void testExceptions() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    t.append("test");
    assertEquals("test", t.toString());
    try {
      t.charAt(-1);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: negative index
    }
    try {
      t.charAt(4);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: index == length
    }
    try {
      t.subSequence(0, 5);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: end past length
    }
    try {
      t.subSequence(5, 0);
      fail("Should throw IndexOutOfBoundsException");
    } catch(IndexOutOfBoundsException iobe) {
      // expected: start past end
    }
  }

  /** Clones {@code att} and asserts clone equality (equals and hashCode). */
  public static <T extends AttributeImpl> T assertCloneIsEqual(T att) {
    @SuppressWarnings("unchecked")
    T clone = (T) att.clone();
    assertEquals("Clone must be equal", att, clone);
    assertEquals("Clone's hashcode must be equal", att.hashCode(), clone.hashCode());
    return clone;
  }

  /** Copies {@code att} into a fresh instance via copyTo and asserts equality. */
  public static <T extends AttributeImpl> T assertCopyIsEqual(T att) throws Exception {
    @SuppressWarnings("unchecked")
    T copy = (T) att.getClass().newInstance();
    att.copyTo(copy);
    assertEquals("Copied instance must be equal", att, copy);
    assertEquals("Copied instance's hashcode must be equal", att.hashCode(), copy.hashCode());
    return copy;
  }

  /*
  // test speed of the dynamic instanceof checks in append(CharSequence),
  // to find the best max length for the generic while (start<end) loop:
  public void testAppendPerf() {
    CharTermAttributeImpl t = new CharTermAttributeImpl();
    final int count = 32;
    CharSequence[] csq = new CharSequence[count * 6];
    final StringBuilder sb = new StringBuilder();
    for (int i=0,j=0; i<count; i++) {
      sb.append(i%10);
      final String testString = sb.toString();
      CharTermAttribute cta = new CharTermAttributeImpl();
      cta.append(testString);
      csq[j++] = cta;
      csq[j++] = testString;
      csq[j++] = new StringBuilder(sb);
      csq[j++] = new StringBuffer(sb);
      csq[j++] = CharBuffer.wrap(testString.toCharArray());
      csq[j++] = new CharSequence() {
        public char charAt(int i) { return testString.charAt(i); }
        public int length() { return testString.length(); }
        public CharSequence subSequence(int start, int end) { return testString.subSequence(start, end); }
        public String toString() { return testString; }
      };
    }
    Random rnd = newRandom();
    long startTime = System.currentTimeMillis();
    for (int i=0; i<100000000; i++) {
      t.setEmpty().append(csq[rnd.nextInt(csq.length)]);
    }
    long endTime = System.currentTimeMillis();
    System.out.println("Time: " + (endTime-startTime)/1000.0 + " s");
  }
  */
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
/**
 * Test that warnings are generated in appropriate cases and appropriate
 * cases only by VariableReferenceCheck.
 */
public final class VariableReferenceCheckTest extends Es6CompilerTestCase {

  /** A snippet with several well-ordered var declarations; should never warn. */
  private static final String VARIABLE_RUN =
      "var a = 1; var b = 2; var c = a + b, d = c;";

  // Toggled per test: when true, the LINT_CHECKS group (unused-local checks) is enabled.
  private boolean enableUnusedLocalAssignmentCheck = false;

  @Override
  public CompilerOptions getOptions() {
    CompilerOptions options = super.getOptions();
    if (enableUnusedLocalAssignmentCheck) {
      options.setWarningLevel(DiagnosticGroups.LINT_CHECKS, CheckLevel.WARNING);
    }
    return options;
  }

  @Override
  public CompilerPass getProcessor(Compiler compiler) {
    // Treats bad reads as errors, and reports bad write warnings.
    return new VariableReferenceCheck(compiler);
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  public void testCorrectCode() {
    assertNoWarning("function foo(d) { (function() { d.foo(); }); d.bar(); } ");
    assertNoWarning("function foo() { bar(); } function bar() { foo(); } ");
    assertNoWarning("function f(d) { d = 3; }");
    assertNoWarning(VARIABLE_RUN);
    assertNoWarning("if (a) { var x; }");
    assertNoWarning("function f() { " + VARIABLE_RUN + "}");
  }

  public void testCorrectShadowing() {
    assertNoWarning(VARIABLE_RUN + "function f() { " + VARIABLE_RUN + "}");
  }

  public void testCorrectRedeclare() {
    // var is function-scoped, so declaring in both branches is one declaration.
    assertNoWarning(
        "function f() { if (1) { var a = 2; } else { var a = 3; } }");
  }

  public void testCorrectRecursion() {
    assertNoWarning("function f() { var x = function() { x(); }; }");
  }

  public void testCorrectCatch() {
    assertNoWarning("function f() { try { var x = 2; } catch (x) {} }");
    assertNoWarning("function f(e) { e = 3; try {} catch (e) {} }");
  }

  public void testRedeclare() {
    // Only test local scope since global scope is covered elsewhere
    assertRedeclare("function f() { var a = 2; var a = 3; }");
    assertRedeclare("function f(a) { var a = 2; }");
    assertRedeclare("function f(a) { if (!a) var a = 6; }");
  }

  public void testEarlyReference() {
    assertUndeclared("function f() { a = 2; var a = 3; }");
  }

  public void testCorrectEarlyReference() {
    assertNoWarning("var goog = goog || {}");
    assertNoWarning("function f() { a = 2; } var a = 2;");
  }

  public void testUnreferencedBleedingFunction() {
    assertNoWarning("var x = function y() {}");
  }

  public void testReferencedBleedingFunction() {
    assertNoWarning("var x = function y() { return y(); }");
  }

  public void testDoubleDeclaration() {
    assertRedeclare("function x(y) { if (true) { var y; } }");
  }

  public void testDoubleDeclaration2() {
    assertRedeclare("function x() { var y; if (true) { var y; } }");
  }

  public void testHoistedFunction1() {
    assertNoWarning("f(); function f() {}");
  }

  public void testHoistedFunction2() {
    assertNoWarning("function g() { f(); function f() {} }");
  }

  public void testNonHoistedFunction() {
    assertUndeclared("if (true) { f(); function f() {} }");
  }

  public void testNonHoistedFunction2() {
    assertNoWarning("if (false) { function f() {} f(); }");
  }

  public void testNonHoistedFunction3() {
    assertNoWarning("function g() { if (false) { function f() {} f(); }}");
  }

  public void testNonHoistedFunction4() {
    assertAmbiguous("if (false) { function f() {} } f();");
  }

  public void testNonHoistedFunction5() {
    assertAmbiguous("function g() { if (false) { function f() {} } f(); }");
  }

  public void testNonHoistedFunction6() {
    assertUndeclared("if (false) { f(); function f() {} }");
  }

  public void testNonHoistedFunction7() {
    assertUndeclared("function g() { if (false) { f(); function f() {} }}");
  }

  public void testNonHoistedRecursiveFunction1() {
    assertNoWarning("if (false) { function f() { f(); }}");
  }

  public void testNonHoistedRecursiveFunction2() {
    assertNoWarning("function g() { if (false) { function f() { f(); }}}");
  }

  public void testNonHoistedRecursiveFunction3() {
    assertNoWarning("function g() { if (false) { function f() { f(); g(); }}}");
  }

  public void testDestructuringInFor() {
    testSameEs6("for (let [key, val] of X){}");
    testSameEs6("for (let [key, [nestKey, nestVal], val] of X){}");
    testSameEs6("var {x: a, y: b} = {x: 1, y: 2}; a++; b++;");
    // Reading `a` before the destructuring declaration is an early reference.
    testWarningEs6("a++; var {x: a} = {x: 1};",
        VariableReferenceCheck.EARLY_REFERENCE);
  }

  public void testNoWarnInExterns1() {
    // Verify duplicate suppressions are properly recognized.
    String externs = "var google; /** @suppress {duplicate} */ var google";
    String code = "";
    testSame(externs, code, null);
  }

  public void testNoWarnInExterns2() {
    // Verify we don't complain about early references in externs
    String externs = "window; var window;";
    String code = "";
    testSame(externs, code, null);
  }

  public void testUnusedLocalVar() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnused("function f() { var a; }");
    assertUnused("function f() { var a = 2; }");
    assertUnused("function f() { var a; a = 2; }");
  }

  /**
   * Inside a goog.scope, don't warn because the alias might be used in a type annotation.
   */
  public void testUnusedLocalVarInGoogScope() {
    enableUnusedLocalAssignmentCheck = true;
    testSame("goog.scope(function f() { var a; });");
    testSame("goog.scope(function f() { /** @typedef {some.long.name} */ var a; });");
    testSame("goog.scope(function f() { var a = some.long.name; });");
  }

  public void testUnusedLocalLet() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnusedEs6("function f() { let a; }");
    assertUnusedEs6("function f() { let a = 2; }");
    assertUnusedEs6("function f() { let a; a = 2; }");
  }

  // Disabled by the "x" prefix; presumably pending unused-const support — confirm before enabling.
  public void xtestUnusedLocalConst() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnusedEs6("function f() { const a = 2; }");
  }

  public void testUnusedLocalArgNoWarning() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f(a) {}");
  }

  public void testUnusedGlobalNoWarning() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("var a = 2;");
  }

  public void testUnusedAssignedInInnerFunction() {
    enableUnusedLocalAssignmentCheck = true;
    assertUnused("function f() { var x = 1; function g() { x = 2; } }");
  }

  public void testUsedInInnerFunction() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f() { var x = 1; function g() { use(x); } }");
  }

  public void testUnusedCatch() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("function f() { try {} catch (x) {} }");
  }

  public void testIncrementCountsAsUse() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("var a = 2; var b = []; b[a++] = 1;");
  }

  public void testForIn() {
    enableUnusedLocalAssignmentCheck = true;
    assertNoWarning("for (var prop in obj) {}");
  }

  /**
   * Expects the JS to generate one redeclared-variable warning.
   */
  private void assertRedeclare(String js) {
    testWarning(js, VariableReferenceCheck.REDECLARED_VARIABLE);
  }

  /**
   * Expects the JS to generate one early-reference warning.
   */
  private void assertUndeclared(String js) {
    testWarning(js, VariableReferenceCheck.EARLY_REFERENCE);
  }

  /**
   * Expects the JS to generate an ambiguous-function-declaration error under ES5,
   * and to pass cleanly under ES6 (where such functions are block scoped).
   */
  private void assertAmbiguous(String js) {
    testError(js, VariableReferenceCheck.AMBIGUOUS_FUNCTION_DECL,
        LanguageMode.ECMASCRIPT5);
    testSameEs6(js); // In ES6, these are block scoped functions, so no ambiguity.
  }

  /**
   * Expects the JS to generate one unused-local-assignment warning.
   */
  private void assertUnused(String js) {
    testWarning(js, VariableReferenceCheck.UNUSED_LOCAL_ASSIGNMENT);
  }

  /**
   * Expects the JS (parsed as ES6) to generate one unused-local-assignment warning.
   */
  private void assertUnusedEs6(String js) {
    testWarningEs6(js, VariableReferenceCheck.UNUSED_LOCAL_ASSIGNMENT);
  }

  /**
   * Expects the JS to generate no errors or warnings.
   */
  private void assertNoWarning(String js) {
    testSame(js);
  }
}
| |
package gg.buff.dorans.objects.generated.lol_static;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
import com.google.gson.annotations.Expose;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
/**
 * BasicData
 * <p>
 * This object contains basic data
 * <p>
 * NOTE(review): generated by jsonschema2pojo — presumably regenerated from a JSON schema, so
 * prefer editing the schema over hand-editing this file. equals/hashCode/toString are
 * builder/reflection based and cover every exposed field.
 */
@Generated("org.jsonschema2pojo")
public class BasicData {

    @Expose
    private java.lang.String colloq;
    @Expose
    private boolean consumeOnFull;
    @Expose
    private boolean consumed;
    @Expose
    private int depth;
    @Expose
    private java.lang.String description;
    // Typed as Object by the schema; actual runtime shape is not visible here — TODO confirm.
    @Expose
    private Object from;
    /**
     * Gold
     * <p>
     * This object contains item gold data
     */
    @Expose
    private Gold gold;
    @Expose
    private java.lang.String group;
    @Expose
    private boolean hideFromAll;
    @Expose
    private int id;
    /**
     * Image
     * <p>
     * This object contains spell vars data
     */
    @Expose
    private Image image;
    @Expose
    private boolean inStore;
    @Expose
    private List<java.lang.String> into = new ArrayList<java.lang.String>();
    @Expose
    private Map<String, Boolean> maps;
    @Expose
    private java.lang.String name;
    @Expose
    private java.lang.String plaintext;
    @Expose
    private java.lang.String requiredChampion;
    /**
     * MetaData
     * <p>
     * This object contains meta data
     */
    @Expose
    private MetaData rune;
    @Expose
    private java.lang.String sanitizedDescription;
    @Expose
    private int specialRecipe;
    @Expose
    private int stacks;
    /**
     * BasicDataStats
     * <p>
     * This object contains basic data stats
     */
    @Expose
    private BasicDataStats stats;
    @Expose
    private List<java.lang.String> tags = new ArrayList<java.lang.String>();

    /**
     * @return The colloq
     */
    public java.lang.String getColloq() {
        return colloq;
    }

    /**
     * @param colloq
     *     The colloq
     */
    public void setColloq(java.lang.String colloq) {
        this.colloq = colloq;
    }

    /**
     * @return The consumeOnFull
     */
    public boolean isConsumeOnFull() {
        return consumeOnFull;
    }

    /**
     * @param consumeOnFull
     *     The consumeOnFull
     */
    public void setConsumeOnFull(boolean consumeOnFull) {
        this.consumeOnFull = consumeOnFull;
    }

    /**
     * @return The consumed
     */
    public boolean isConsumed() {
        return consumed;
    }

    /**
     * @param consumed
     *     The consumed
     */
    public void setConsumed(boolean consumed) {
        this.consumed = consumed;
    }

    /**
     * @return The depth
     */
    public int getDepth() {
        return depth;
    }

    /**
     * @param depth
     *     The depth
     */
    public void setDepth(int depth) {
        this.depth = depth;
    }

    /**
     * @return The description
     */
    public java.lang.String getDescription() {
        return description;
    }

    /**
     * @param description
     *     The description
     */
    public void setDescription(java.lang.String description) {
        this.description = description;
    }

    /**
     * @return The from
     */
    public Object getFrom() {
        return from;
    }

    /**
     * @param from
     *     The from
     */
    public void setFrom(Object from) {
        this.from = from;
    }

    /**
     * Gold
     * <p>
     * This object contains item gold data
     *
     * @return The gold
     */
    public Gold getGold() {
        return gold;
    }

    /**
     * Gold
     * <p>
     * This object contains item gold data
     *
     * @param gold
     *     The gold
     */
    public void setGold(Gold gold) {
        this.gold = gold;
    }

    /**
     * @return The group
     */
    public java.lang.String getGroup() {
        return group;
    }

    /**
     * @param group
     *     The group
     */
    public void setGroup(java.lang.String group) {
        this.group = group;
    }

    /**
     * @return The hideFromAll
     */
    public boolean isHideFromAll() {
        return hideFromAll;
    }

    /**
     * @param hideFromAll
     *     The hideFromAll
     */
    public void setHideFromAll(boolean hideFromAll) {
        this.hideFromAll = hideFromAll;
    }

    /**
     * @return The id
     */
    public int getId() {
        return id;
    }

    /**
     * @param id
     *     The id
     */
    public void setId(int id) {
        this.id = id;
    }

    /**
     * Image
     * <p>
     * This object contains spell vars data
     *
     * @return The image
     */
    public Image getImage() {
        return image;
    }

    /**
     * Image
     * <p>
     * This object contains spell vars data
     *
     * @param image
     *     The image
     */
    public void setImage(Image image) {
        this.image = image;
    }

    /**
     * @return The inStore
     */
    public boolean isInStore() {
        return inStore;
    }

    /**
     * @param inStore
     *     The inStore
     */
    public void setInStore(boolean inStore) {
        this.inStore = inStore;
    }

    /**
     * @return The into
     */
    public List<java.lang.String> getInto() {
        return into;
    }

    /**
     * @param into
     *     The into
     */
    public void setInto(List<java.lang.String> into) {
        this.into = into;
    }

    /**
     * @return The maps
     */
    public Map<String, Boolean> getMaps() {
        return maps;
    }

    /**
     * @param maps
     *     The maps
     */
    public void setMaps(Map<String, Boolean> maps) {
        this.maps = maps;
    }

    /**
     * @return The name
     */
    public java.lang.String getName() {
        return name;
    }

    /**
     * @param name
     *     The name
     */
    public void setName(java.lang.String name) {
        this.name = name;
    }

    /**
     * @return The plaintext
     */
    public java.lang.String getPlaintext() {
        return plaintext;
    }

    /**
     * @param plaintext
     *     The plaintext
     */
    public void setPlaintext(java.lang.String plaintext) {
        this.plaintext = plaintext;
    }

    /**
     * @return The requiredChampion
     */
    public java.lang.String getRequiredChampion() {
        return requiredChampion;
    }

    /**
     * @param requiredChampion
     *     The requiredChampion
     */
    public void setRequiredChampion(java.lang.String requiredChampion) {
        this.requiredChampion = requiredChampion;
    }

    /**
     * MetaData
     * <p>
     * This object contains meta data
     *
     * @return The rune
     */
    public MetaData getRune() {
        return rune;
    }

    /**
     * MetaData
     * <p>
     * This object contains meta data
     *
     * @param rune
     *     The rune
     */
    public void setRune(MetaData rune) {
        this.rune = rune;
    }

    /**
     * @return The sanitizedDescription
     */
    public java.lang.String getSanitizedDescription() {
        return sanitizedDescription;
    }

    /**
     * @param sanitizedDescription
     *     The sanitizedDescription
     */
    public void setSanitizedDescription(java.lang.String sanitizedDescription) {
        this.sanitizedDescription = sanitizedDescription;
    }

    /**
     * @return The specialRecipe
     */
    public int getSpecialRecipe() {
        return specialRecipe;
    }

    /**
     * @param specialRecipe
     *     The specialRecipe
     */
    public void setSpecialRecipe(int specialRecipe) {
        this.specialRecipe = specialRecipe;
    }

    /**
     * @return The stacks
     */
    public int getStacks() {
        return stacks;
    }

    /**
     * @param stacks
     *     The stacks
     */
    public void setStacks(int stacks) {
        this.stacks = stacks;
    }

    /**
     * BasicDataStats
     * <p>
     * This object contains basic data stats
     *
     * @return The stats
     */
    public BasicDataStats getStats() {
        return stats;
    }

    /**
     * BasicDataStats
     * <p>
     * This object contains basic data stats
     *
     * @param stats
     *     The stats
     */
    public void setStats(BasicDataStats stats) {
        this.stats = stats;
    }

    /**
     * @return The tags
     */
    public List<java.lang.String> getTags() {
        return tags;
    }

    /**
     * @param tags
     *     The tags
     */
    public void setTags(List<java.lang.String> tags) {
        this.tags = tags;
    }

    // Reflection-based dump of all fields; intended for debugging output only.
    @Override
    public java.lang.String toString() {
        return ToStringBuilder.reflectionToString(this);
    }

    // hashCode/equals cover the same field set in the same order, keeping them consistent.
    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(colloq).append(consumeOnFull).append(consumed).append(depth).append(description).append(from).append(gold).append(group).append(hideFromAll).append(id).append(image).append(inStore).append(into).append(maps).append(name).append(plaintext).append(requiredChampion).append(rune).append(sanitizedDescription).append(specialRecipe).append(stacks).append(stats).append(tags).toHashCode();
    }

    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        // Generator emits "== false" rather than "!"; left as-is since this file is generated.
        if ((other instanceof BasicData) == false) {
            return false;
        }
        BasicData rhs = ((BasicData) other);
        return new EqualsBuilder().append(colloq, rhs.colloq).append(consumeOnFull, rhs.consumeOnFull).append(consumed, rhs.consumed).append(depth, rhs.depth).append(description, rhs.description).append(from, rhs.from).append(gold, rhs.gold).append(group, rhs.group).append(hideFromAll, rhs.hideFromAll).append(id, rhs.id).append(image, rhs.image).append(inStore, rhs.inStore).append(into, rhs.into).append(maps, rhs.maps).append(name, rhs.name).append(plaintext, rhs.plaintext).append(requiredChampion, rhs.requiredChampion).append(rune, rhs.rune).append(sanitizedDescription, rhs.sanitizedDescription).append(specialRecipe, rhs.specialRecipe).append(stacks, rhs.stacks).append(stats, rhs.stats).append(tags, rhs.tags).isEquals();
    }
}
| |
/*******************************************************************************
* Copyright (C) PicoContainer Organization. All rights reserved.
* ---------------------------------------------------------------------------
* The software in this package is published under the terms of the BSD style
* license a copy of which has been included with this distribution in the
* LICENSE.txt file.
******************************************************************************/
package org.picocontainer.classname;
import org.picocontainer.ComponentAdapter;
import org.picocontainer.*;
import org.picocontainer.security.CustomPermissionsURLClassLoader;
import org.picocontainer.lifecycle.LifecycleState;
import org.picocontainer.behaviors.Caching;
import org.picocontainer.containers.AbstractDelegatingMutablePicoContainer;
import java.io.File;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.net.URL;
import java.security.AccessController;
import java.security.CodeSource;
import java.security.PrivilegedAction;
import java.security.Permissions;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Default implementation of ClassLoadingPicoContainer.
*
* @author Paul Hammant
* @author Mauro Talevi
* @author Michael Rimov
*/
@SuppressWarnings("serial")
public class DefaultClassLoadingPicoContainer extends AbstractDelegatingMutablePicoContainer implements
ClassLoadingPicoContainer, ComponentMonitorStrategy {
/**
 * Maps primitive type names (e.g. "int") to their boxed class names (e.g. "java.lang.Integer"),
 * so textual references to primitive types can be resolved to loadable classes.
 * ("transient" was dropped: it has no effect on static fields.)
 */
private static final Map<String, String> primitiveNameToBoxedName = new HashMap<String, String>();

static {
    primitiveNameToBoxedName.put("int", Integer.class.getName());
    primitiveNameToBoxedName.put("byte", Byte.class.getName());
    primitiveNameToBoxedName.put("short", Short.class.getName());
    primitiveNameToBoxedName.put("long", Long.class.getName());
    primitiveNameToBoxedName.put("float", Float.class.getName());
    primitiveNameToBoxedName.put("double", Double.class.getName());
    primitiveNameToBoxedName.put("boolean", Boolean.class.getName());
    // "char" was missing from the original table even though it is a primitive like the others.
    primitiveNameToBoxedName.put("char", Character.class.getName());
}
/** Class-path elements accumulated for this container, in insertion order. */
private final transient List<ClassPathElement> classPathElements = new ArrayList<ClassPathElement>();
/** Parent loader from which the component class loader is derived. */
private final transient ClassLoader parentClassLoader;
/** Lazily built loader for component classes — NOTE(review): lifecycle inferred from name; confirm against getComponentClassLoader(). */
private transient ClassLoader componentClassLoader;
/** Presumably set once the component class loader has been handed out, preventing further class-path changes — TODO confirm. */
private transient boolean componentClassLoaderLocked;
/** Child containers registered under a name. */
protected final Map<String, PicoContainer> namedChildContainers = new HashMap<String, PicoContainer>();
/** Delegates to a new DefaultPicoContainer built from {@code componentFactory} and {@code parent}; {@code classLoader} parents the component class loader. */
public DefaultClassLoadingPicoContainer(ClassLoader classLoader, ComponentFactory componentFactory, PicoContainer parent) {
    super(new DefaultPicoContainer(componentFactory, parent));
    parentClassLoader = classLoader;
}
/** Wraps an existing delegate container; {@code classLoader} parents the component class loader. */
public DefaultClassLoadingPicoContainer(ClassLoader classLoader, MutablePicoContainer delegate) {
    super(delegate);
    parentClassLoader = classLoader;
}
/** Builds a caching delegate container with the given parent and installs {@code componentMonitor} on it. */
public DefaultClassLoadingPicoContainer(ClassLoader classLoader, PicoContainer parent, ComponentMonitor componentMonitor) {
    super(new DefaultPicoContainer(new Caching(), parent));
    parentClassLoader = classLoader;
    // DefaultPicoContainer implements ComponentMonitorStrategy, so this cast is safe here.
    ((ComponentMonitorStrategy) getDelegate()).changeMonitor(componentMonitor);
}
public DefaultClassLoadingPicoContainer(ComponentFactory componentFactory) {
super(new DefaultPicoContainer(componentFactory, null));
parentClassLoader = DefaultClassLoadingPicoContainer.class.getClassLoader();
}
public DefaultClassLoadingPicoContainer(PicoContainer parent) {
super(new DefaultPicoContainer(parent));
parentClassLoader = DefaultClassLoadingPicoContainer.class.getClassLoader();
}
public DefaultClassLoadingPicoContainer(MutablePicoContainer delegate) {
super(delegate);
parentClassLoader = DefaultClassLoadingPicoContainer.class.getClassLoader();
}
public DefaultClassLoadingPicoContainer(ClassLoader classLoader) {
super(new DefaultPicoContainer());
parentClassLoader = classLoader;
}
public DefaultClassLoadingPicoContainer() {
super(new DefaultPicoContainer());
parentClassLoader = DefaultClassLoadingPicoContainer.class.getClassLoader();
}
public DefaultClassLoadingPicoContainer(ComponentFactory componentFactory, LifecycleStrategy lifecycleStrategy,
PicoContainer parent, ClassLoader cl, ComponentMonitor componentMonitor) {
super(new DefaultPicoContainer(componentFactory, lifecycleStrategy, parent, componentMonitor));
parentClassLoader = (cl != null) ? cl : DefaultClassLoadingPicoContainer.class.getClassLoader();
}
protected DefaultClassLoadingPicoContainer createChildContainer() {
MutablePicoContainer child = getDelegate().makeChildContainer();
DefaultClassLoadingPicoContainer container = new DefaultClassLoadingPicoContainer(getComponentClassLoader(), child);
container.changeMonitor(currentMonitor());
return container;
}
/**
* Propagates the monitor change down the delegate chain if a delegate that implements ComponentMonitorStrategy
* exists. Because of the ComponentMonitorStrategy API, not all delegates can have their API changed. If
* a delegate implementing ComponentMonitorStrategy cannot be found, an exception is thrown.
* @throws IllegalStateException if no delegate can be found that implements ComponentMonitorStrategy.
* @param monitor the monitor to swap.
*/
public void changeMonitor(ComponentMonitor monitor) {
MutablePicoContainer picoDelegate = getDelegate();
while (picoDelegate != null) {
if (picoDelegate instanceof ComponentMonitorStrategy) {
((ComponentMonitorStrategy)picoDelegate).changeMonitor(monitor);
return;
}
if (picoDelegate instanceof AbstractDelegatingMutablePicoContainer) {
picoDelegate = ((AbstractDelegatingMutablePicoContainer)picoDelegate).getDelegate();
} else {
break;
}
}
throw new IllegalStateException("Could not find delegate picocontainer that implemented ComponentMonitorStrategy");
}
public ComponentMonitor currentMonitor() {
MutablePicoContainer picoDelegate = getDelegate();
while (picoDelegate != null) {
if (picoDelegate instanceof ComponentMonitorStrategy) {
return ((ComponentMonitorStrategy)picoDelegate).currentMonitor();
}
if (picoDelegate instanceof AbstractDelegatingMutablePicoContainer) {
picoDelegate = ((AbstractDelegatingMutablePicoContainer)picoDelegate).getDelegate();
} else {
break;
}
}
throw new IllegalStateException("Could not find delegate picocontainer that implemented ComponentMonitorStrategy");
}
public final Object getComponent(Object componentKeyOrType) throws PicoException {
if (componentKeyOrType instanceof ClassName) {
componentKeyOrType = loadClass((ClassName) componentKeyOrType);
}
Object instance = getDelegate().getComponent(componentKeyOrType);
if (instance != null) {
return instance;
}
ComponentAdapter<?> componentAdapter = null;
if (componentKeyOrType.toString().startsWith("*")) {
String candidateClassName = componentKeyOrType.toString().substring(1);
Collection<ComponentAdapter<?>> cas = getComponentAdapters();
for (ComponentAdapter<?> ca : cas) {
Object key = ca.getComponentKey();
if (key instanceof Class && candidateClassName.equals(((Class<?>) key).getName())) {
componentAdapter = ca;
break;
}
}
}
if (componentAdapter != null) {
return componentAdapter.getComponentInstance(this, ComponentAdapter.NOTHING.class);
} else {
return getComponentInstanceFromChildren(componentKeyOrType);
}
}
private Object getComponentInstanceFromChildren(Object componentKey) {
String componentKeyPath = componentKey.toString();
int ix = componentKeyPath.indexOf('/');
if (ix != -1) {
String firstElement = componentKeyPath.substring(0, ix);
String remainder = componentKeyPath.substring(ix + 1, componentKeyPath.length());
Object o = getNamedContainers().get(firstElement);
if (o != null) {
MutablePicoContainer child = (MutablePicoContainer) o;
return child.getComponent(remainder);
}
}
return null;
}
public final MutablePicoContainer makeChildContainer() {
return makeChildContainer("containers" + namedChildContainers.size());
}
/**
* Makes a child container with the same basic characteristics of
* <tt>this</tt> object (ComponentFactory, PicoContainer type, Behavior,
* etc)
*
* @param name the name of the child container
* @return The child MutablePicoContainer
*/
public ClassLoadingPicoContainer makeChildContainer(String name) {
DefaultClassLoadingPicoContainer child = createChildContainer();
MutablePicoContainer parentDelegate = getDelegate();
parentDelegate.removeChildContainer(child.getDelegate());
parentDelegate.addChildContainer(child);
namedChildContainers.put(name, child);
return child;
}
public boolean removeChildContainer(PicoContainer child) {
boolean result = getDelegate().removeChildContainer(child);
Iterator<Map.Entry<String, PicoContainer>> children = namedChildContainers.entrySet().iterator();
while (children.hasNext()) {
Map.Entry<String, PicoContainer> e = children.next();
PicoContainer pc = e.getValue();
if (pc == child) {
children.remove();
}
}
return result;
}
protected final Map<String, PicoContainer> getNamedContainers() {
return namedChildContainers;
}
public ClassPathElement addClassLoaderURL(URL url) {
if (componentClassLoaderLocked) {
throw new IllegalStateException("ClassLoader URLs cannot be added once this instance is locked");
}
ClassPathElement classPathElement = new ClassPathElement(url);
classPathElements.add(classPathElement);
return classPathElement;
}
public MutablePicoContainer addComponent(Object implOrInstance) {
if (implOrInstance instanceof ClassName) {
super.addComponent(loadClass((ClassName) implOrInstance));
} else {
super.addComponent(implOrInstance);
}
return this;
}
public MutablePicoContainer addComponent(Object key, Object componentImplementationOrInstance,
Parameter... parameters) {
super.addComponent(classNameToClassIfApplicable(key),
classNameToClassIfApplicable(componentImplementationOrInstance), parameters);
return this;
}
private Object classNameToClassIfApplicable(Object key) {
if (key instanceof ClassName) {
key = loadClass((ClassName) key);
}
return key;
}
public MutablePicoContainer addAdapter(ComponentAdapter<?> componentAdapter) throws PicoCompositionException {
super.addAdapter(componentAdapter);
return this;
}
public ClassLoader getComponentClassLoader() {
if (componentClassLoader == null) {
componentClassLoaderLocked = true;
componentClassLoader = AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
public ClassLoader run() {
return new CustomPermissionsURLClassLoader(getURLs(classPathElements), makePermissions(),
parentClassLoader);
}
});
}
return componentClassLoader;
}
public MutablePicoContainer addChildContainer(PicoContainer child) {
getDelegate().addChildContainer(child);
namedChildContainers.put("containers" + namedChildContainers.size(), child);
return this;
}
public ClassLoadingPicoContainer addChildContainer(String name, PicoContainer child) {
super.addChildContainer(child);
namedChildContainers.put(name, child);
return this;
}
private Class<?> loadClass(final ClassName className) {
ClassLoader classLoader = getComponentClassLoader();
// this is deliberately not a doPrivileged operation.
String cn = getClassName(className.toString());
try {
return classLoader.loadClass(cn);
} catch (ClassNotFoundException e) {
throw new PicoClassNotFoundException(cn, e);
}
}
private Map<URL, Permissions> makePermissions() {
Map<URL, Permissions> permissionsMap = new HashMap<URL, Permissions>();
for (ClassPathElement cpe : classPathElements) {
Permissions permissionCollection = cpe.getPermissionCollection();
permissionsMap.put(cpe.getUrl(), permissionCollection);
}
return permissionsMap;
}
private URL[] getURLs(List<ClassPathElement> classPathElemelements) {
final URL[] urls = new URL[classPathElemelements.size()];
for (int i = 0; i < urls.length; i++) {
urls[i] = (classPathElemelements.get(i)).getUrl();
}
return urls;
}
private static String getClassName(String primitiveOrClass) {
String fromMap = primitiveNameToBoxedName.get(primitiveOrClass);
return fromMap != null ? fromMap : primitiveOrClass;
}
public ComponentAdapter<?> getComponentAdapter(Object componentKey) {
Object componentKey2 = componentKey;
if (componentKey instanceof ClassName) {
componentKey2 = loadClass((ClassName) componentKey);
}
return super.getComponentAdapter(componentKey2);
}
public MutablePicoContainer change(Properties... properties) {
super.change(properties);
return this;
}
public MutablePicoContainer as(Properties... properties) {
return new AsPropertiesPicoContainer(properties);
}
private class AsPropertiesPicoContainer implements ClassLoadingPicoContainer {
private MutablePicoContainer delegate;
public AsPropertiesPicoContainer(Properties... props) {
delegate = DefaultClassLoadingPicoContainer.this.getDelegate().as(props);
}
public ClassPathElement addClassLoaderURL(URL url) {
return DefaultClassLoadingPicoContainer.this.addClassLoaderURL(url);
}
public ClassLoader getComponentClassLoader() {
return DefaultClassLoadingPicoContainer.this.getComponentClassLoader();
}
public ClassLoadingPicoContainer makeChildContainer(String name) {
return DefaultClassLoadingPicoContainer.this.makeChildContainer(name);
}
public ClassLoadingPicoContainer addChildContainer(String name, PicoContainer child) {
return (ClassLoadingPicoContainer) DefaultClassLoadingPicoContainer.this.addChildContainer(child);
}
public MutablePicoContainer addComponent(Object componentKey, Object componentImplementationOrInstance,
Parameter... parameters) {
delegate.addComponent(classNameToClassIfApplicable(componentKey),
classNameToClassIfApplicable(componentImplementationOrInstance), parameters);
return DefaultClassLoadingPicoContainer.this;
}
public MutablePicoContainer addComponent(Object implOrInstance) {
delegate.addComponent(classNameToClassIfApplicable(implOrInstance));
return DefaultClassLoadingPicoContainer.this;
}
public MutablePicoContainer addConfig(String name, Object val) {
delegate.addConfig(name, val);
return DefaultClassLoadingPicoContainer.this;
}
public MutablePicoContainer addAdapter(ComponentAdapter<?> componentAdapter) {
delegate.addAdapter(componentAdapter);
return DefaultClassLoadingPicoContainer.this;
}
public ComponentAdapter removeComponent(Object componentKey) {
return delegate.removeComponent(componentKey);
}
public ComponentAdapter removeComponentByInstance(Object componentInstance) {
return delegate.removeComponentByInstance(componentInstance);
}
public MutablePicoContainer makeChildContainer() {
return DefaultClassLoadingPicoContainer.this.makeChildContainer();
}
public MutablePicoContainer addChildContainer(PicoContainer child) {
return DefaultClassLoadingPicoContainer.this.addChildContainer(child);
}
public boolean removeChildContainer(PicoContainer child) {
return DefaultClassLoadingPicoContainer.this.removeChildContainer(child);
}
public MutablePicoContainer change(Properties... properties) {
return DefaultClassLoadingPicoContainer.this.change(properties);
}
public MutablePicoContainer as(Properties... properties) {
return new AsPropertiesPicoContainer(properties);
}
public Object getComponent(Object componentKeyOrType) {
return DefaultClassLoadingPicoContainer.this.getComponent(componentKeyOrType);
}
public Object getComponent(Object componentKeyOrType, Type into) {
return DefaultClassLoadingPicoContainer.this.getComponent(componentKeyOrType, into);
}
public <T> T getComponent(Class<T> componentType) {
return DefaultClassLoadingPicoContainer.this.getComponent(componentType);
}
public <T> T getComponent(Class<T> componentType, Class<? extends Annotation> binding) {
return DefaultClassLoadingPicoContainer.this.getComponent(componentType, binding);
}
public List<Object> getComponents() {
return DefaultClassLoadingPicoContainer.this.getComponents();
}
public PicoContainer getParent() {
return DefaultClassLoadingPicoContainer.this.getParent();
}
public ComponentAdapter<?> getComponentAdapter(Object componentKey) {
return DefaultClassLoadingPicoContainer.this.getComponentAdapter(componentKey);
}
public <T> ComponentAdapter<T> getComponentAdapter(Class<T> componentType, NameBinding componentNameBinding) {
return DefaultClassLoadingPicoContainer.this.getComponentAdapter(componentType, componentNameBinding);
}
public <T> ComponentAdapter<T> getComponentAdapter(Class<T> componentType, Class<? extends Annotation> binding) {
return DefaultClassLoadingPicoContainer.this.getComponentAdapter(componentType, binding);
}
public Collection<ComponentAdapter<?>> getComponentAdapters() {
return DefaultClassLoadingPicoContainer.this.getComponentAdapters();
}
public <T> List<ComponentAdapter<T>> getComponentAdapters(Class<T> componentType) {
return DefaultClassLoadingPicoContainer.this.getComponentAdapters(componentType);
}
public <T> List<ComponentAdapter<T>> getComponentAdapters(Class<T> componentType,
Class<? extends Annotation> binding) {
return DefaultClassLoadingPicoContainer.this.getComponentAdapters(componentType, binding);
}
public <T> List<T> getComponents(Class<T> componentType) {
return DefaultClassLoadingPicoContainer.this.getComponents(componentType);
}
public void accept(PicoVisitor visitor) {
DefaultClassLoadingPicoContainer.this.accept(visitor);
}
public void start() {
//This implementation does nothing on lifecycle triggers.
}
public void stop() {
//This implementation does nothing on lifecycle triggers.
}
public void dispose() {
//This implementation does nothing on lifecycle triggers.
}
public void setName(String name) {
DefaultClassLoadingPicoContainer.this.setName(name);
}
public void setLifecycleState(LifecycleState lifecycleState) {
DefaultClassLoadingPicoContainer.this.setLifecycleState(lifecycleState);
}
public Converters getConverter() {
return DefaultClassLoadingPicoContainer.this.getConverters();
}
/**
* {@inheritDoc}
* @see org.picocontainer.MutablePicoContainer#getLifecycleState()
*/
public LifecycleState getLifecycleState() {
return DefaultClassLoadingPicoContainer.this.getLifecycleState();
}
/**
* {@inheritDoc}
* @see org.picocontainer.MutablePicoContainer#getName()
*/
public String getName() {
return DefaultClassLoadingPicoContainer.this.getName();
}
}
public int visit(ClassName thisClassesPackage, String regex, boolean recursive, ClassVisitor classNameVisitor) {
Class clazz = loadClass(thisClassesPackage);
String pkgName = clazz.getPackage().getName().replace(".", File.separator);
CodeSource codeSource = clazz.getProtectionDomain().getCodeSource();
if(codeSource == null) {
throw new PicoCompositionException("no codesource for " + thisClassesPackage);
}
String codeSourceRoot = codeSource.getLocation().getFile();
String fileName = codeSourceRoot + File.separator + pkgName;
File file = new File(fileName);
Pattern compiledPattern = Pattern.compile(regex);
if (file.exists()) {
if (file.isFile()) {
file = file.getParentFile();
}
return visit(file, pkgName, compiledPattern, recursive, classNameVisitor);
} else {
return visit(pkgName, codeSourceRoot, compiledPattern, recursive, classNameVisitor);
}
}
public int visit(String pkgName, String codeSourceRoot, Pattern compiledPattern, boolean recursive, ClassVisitor classNameVisitor) {
int found = 0;
try {
ZipFile zip = new ZipFile(new File(codeSourceRoot));
for (Enumeration e = zip.entries(); e.hasMoreElements();) {
ZipEntry entry = (ZipEntry) e.nextElement();
String entryName = entry.getName();
if (entryName.startsWith(pkgName) && entryName.endsWith(".class")) {
String name = entryName.substring(pkgName.length()+1);
if (name.endsWith("XStream.class")) {
System.out.println();
}
int length = name.split("/").length;
if (length == 1 || recursive) {
found = visit(pkgName, compiledPattern, classNameVisitor, found, entryName.replace("/","."), null);
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
return found;
}
public int visit(File pkgDir, String pkgName, Pattern pattern, boolean recursive, ClassVisitor classNameVisitor) {
int found = 0;
File files[] = pkgDir.listFiles();
if(files != null) {
for (File file : files) {
if (file.isDirectory()) {
if (recursive) {
found = found + visit(file, pkgName, pattern, recursive, classNameVisitor);
}
} else {
found = visit(pkgName, pattern, classNameVisitor, found, file.getName(), file.getAbsolutePath());
}
}
}
return found;
}
private int visit(String pkgName, Pattern pattern, ClassVisitor classNameVisitor, int foundSoFar, String fileName, String absolutePath) {
boolean matches = pattern.matcher(fileName).matches();
if (matches) {
if (absolutePath != null) {
String fqn = absolutePath.substring(absolutePath.indexOf(pkgName));
fileName = fqn.substring(0, fqn.indexOf(".class")).replace(File.separator, ".");
} else {
fileName = fileName.substring(0, fileName.indexOf(".class"));
}
classNameVisitor.classFound(loadClass(new ClassName(fileName)));
foundSoFar++;
}
return foundSoFar;
}
public interface ClassVisitor {
void classFound(Class clazz);
}
}
| |
/*
* Copyright (c) 2017, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Application;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.content.res.Resources;
import android.content.res.TypedArray;
import android.os.Build;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.content.ContextCompat;
import android.text.TextUtils;
import com.apptentive.android.sdk.Apptentive.LoginCallback;
import com.apptentive.android.sdk.comm.ApptentiveHttpClient;
import com.apptentive.android.sdk.conversation.Conversation;
import com.apptentive.android.sdk.conversation.ConversationManager;
import com.apptentive.android.sdk.conversation.ConversationProxy;
import com.apptentive.android.sdk.debug.ErrorMetrics;
import com.apptentive.android.sdk.debug.LogMonitor;
import com.apptentive.android.sdk.encryption.SecurityManager;
import com.apptentive.android.sdk.lifecycle.ApptentiveActivityLifecycleCallbacks;
import com.apptentive.android.sdk.model.Configuration;
import com.apptentive.android.sdk.model.EventPayload;
import com.apptentive.android.sdk.model.LogoutPayload;
import com.apptentive.android.sdk.module.engagement.EngagementModule;
import com.apptentive.android.sdk.module.engagement.interaction.InteractionManager;
import com.apptentive.android.sdk.module.engagement.interaction.model.Interaction;
import com.apptentive.android.sdk.module.engagement.interaction.model.MessageCenterInteraction;
import com.apptentive.android.sdk.module.engagement.interaction.model.TermsAndConditions;
import com.apptentive.android.sdk.module.messagecenter.MessageManager;
import com.apptentive.android.sdk.module.rating.IRatingProvider;
import com.apptentive.android.sdk.module.rating.impl.GooglePlayRatingProvider;
import com.apptentive.android.sdk.module.survey.OnSurveyFinishedListener;
import com.apptentive.android.sdk.notifications.ApptentiveNotification;
import com.apptentive.android.sdk.notifications.ApptentiveNotificationCenter;
import com.apptentive.android.sdk.notifications.ApptentiveNotificationObserver;
import com.apptentive.android.sdk.partners.apptimize.ApptentiveApptimize;
import com.apptentive.android.sdk.partners.apptimize.ApptentiveApptimizeTestInfo;
import com.apptentive.android.sdk.storage.*;
import com.apptentive.android.sdk.util.*;
import com.apptentive.android.sdk.util.threading.DispatchQueue;
import com.apptentive.android.sdk.util.threading.DispatchTask;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;
import static com.apptentive.android.sdk.ApptentiveHelper.*;
import static com.apptentive.android.sdk.ApptentiveLogTag.*;
import static com.apptentive.android.sdk.ApptentiveNotifications.*;
import static com.apptentive.android.sdk.debug.Assert.assertNotNull;
import static com.apptentive.android.sdk.debug.Assert.assertTrue;
import static com.apptentive.android.sdk.util.Constants.CONVERSATIONS_DIR;
/**
 * This class contains only internal methods. These methods should not be accessed directly by the host app.
 */
public class ApptentiveInternal implements ApptentiveInstance, ApptentiveNotificationObserver {
// Core subsystems, created once in the private constructor and immutable thereafter.
private final ApptentiveTaskManager taskManager;
private final ApptentiveHttpClient apptentiveHttpClient;
private final ConversationManager conversationManager;
// These variables are initialized in Apptentive.register(), and are safe to use freely thereafter.
// If they are unexpectedly null, it means the host app did not register Apptentive.
private final Context appContext;
// We keep a readonly reference to AppRelease object since it won't change at runtime
private final AppRelease appRelease;
private final SharedPreferences globalSharedPrefs;
private final String apptentiveKey;
private final String apptentiveSignature;
// Server base URL; when null, Constants.CONFIG_DEFAULT_SERVER_URL is used (see getServerUrl()).
private String serverUrl;
private String appPackageName;
private TermsAndConditions surveyTermsAndConditions;
// toolbar theme specified in R.attr.apptentiveToolbarTheme
private Resources.Theme apptentiveToolbarTheme;
// app default appcompat theme res id, if specified in app AndroidManifest
private int appDefaultAppCompatThemeId;
private int statusBarColorDefault;
private String defaultAppDisplayName = "this app";
// booleans to prevent starting multiple fetching asyncTasks simultaneously
private IRatingProvider ratingProvider;
private Map<String, String> ratingProviderArgs;
// Weak references so listeners set by the host app do not leak their Activities.
private WeakReference<OnSurveyFinishedListener> onSurveyFinishedListener;
private final LinkedBlockingQueue interactionUpdateListeners = new LinkedBlockingQueue();
private WeakReference<Apptentive.AuthenticationFailedListener> authenticationFailedListenerRef = null;
// Used for temporarily holding customData that needs to be sent on the next message the consumer sends.
private Map<String, Object> customData;
private ThrottleUtils throttleUtils;
// Keys used when parsing Apptentive push payloads.
private static final String PUSH_ACTION = "action";
private static final String PUSH_CONVERSATION_ID = "conversation_id";
// Number of log files retained by the troubleshooting log writer.
private static final int LOG_HISTORY_SIZE = 2;
/**
 * Actions an Apptentive push payload may request. Unrecognized action names map to
 * {@link #unknown} so newer server payloads degrade gracefully on older SDKs.
 */
private enum PushAction {
	pmc,     // Present Message Center.
	unknown; // Anything unknown will not be handled.

	/** Parses a push action name leniently; never throws, returns {@link #unknown} on mismatch. */
	public static PushAction parse(String name) {
		PushAction parsed;
		try {
			parsed = PushAction.valueOf(name);
		} catch (IllegalArgumentException e) {
			ApptentiveLog.w(PUSH, "This version of the SDK can't handle push action '%s'", name);
			logException(e);
			parsed = unknown;
		}
		return parsed;
	}
}
// Singleton instance; holds the Application context only (hence the lint suppression).
@SuppressLint("StaticFieldLeak")
private static volatile ApptentiveInternal sApptentiveInternal;
// Null-object returned by getInstance() before register() is called, so callers never see null.
private static final ApptentiveInstance NULL_INSTANCE = new ApptentiveNullInstance();
// for unit testing
/**
 * Test-only constructor: retains the supplied context and leaves every subsystem null.
 * Production code must use the private (Application, configuration, androidID) constructor.
 */
public ApptentiveInternal(Context appContext) {
	this.appContext = appContext;
	// All collaborators are deliberately left unset for unit tests.
	taskManager = null;
	apptentiveHttpClient = null;
	conversationManager = null;
	globalSharedPrefs = null;
	apptentiveKey = null;
	apptentiveSignature = null;
	appRelease = null;
}
/**
 * Builds the fully-wired SDK instance. Initialization order matters: encryption must exist
 * before the conversation manager and task manager; shared prefs before the HTTP client.
 *
 * @param application the host application
 * @param configuration registration-time configuration (must not be null)
 * @param androidID resolved Android ID (or a generated stand-in) used for device identity
 */
private ApptentiveInternal(Application application, ApptentiveConfiguration configuration, @NonNull String androidID) {
if (configuration == null) {
throw new IllegalArgumentException("Configuration is null");
}
String apptentiveKey = configuration.getApptentiveKey();
String apptentiveSignature = configuration.getApptentiveSignature();
String serverUrl = configuration.getBaseURL();
this.apptentiveKey = apptentiveKey;
this.apptentiveSignature = apptentiveSignature;
this.serverUrl = serverUrl;
this.surveyTermsAndConditions = configuration.getSurveyTermsAndConditions();
boolean shouldEncryptStorage = configuration.shouldEncryptStorage();
Encryption encryption = SecurityManager.getEncryption(application.getApplicationContext(), configuration.getEncryption(), shouldEncryptStorage);
appContext = application.getApplicationContext();
globalSharedPrefs = application.getSharedPreferences(Constants.PREF_NAME, Context.MODE_PRIVATE);
// HTTP client needs the endpoint base, which may be overridden via global prefs.
apptentiveHttpClient = new ApptentiveHttpClient(apptentiveKey, apptentiveSignature, getEndpointBase(globalSharedPrefs));
this.throttleUtils = new ThrottleUtils(configuration.getInteractionThrottle(), getGlobalSharedPrefs());
DeviceManager deviceManager = new DeviceManager(androidID);
conversationManager = new ConversationManager(appContext, Util.getInternalDir(appContext, CONVERSATIONS_DIR, true), encryption, deviceManager);
appRelease = AppReleaseManager.generateCurrentAppRelease(application, this);
taskManager = new ApptentiveTaskManager(appContext, apptentiveHttpClient, encryption);
// Subscribe to all lifecycle/conversation notifications this class reacts to.
ApptentiveNotificationCenter.defaultCenter()
.addObserver(NOTIFICATION_CONVERSATION_STATE_DID_CHANGE, this)
.addObserver(NOTIFICATION_CONVERSATION_WILL_LOGOUT, this)
.addObserver(NOTIFICATION_AUTHENTICATION_FAILED, this)
.addObserver(NOTIFICATION_INTERACTION_MANIFEST_FETCHED, this)
.addObserver(NOTIFICATION_APP_ENTERED_FOREGROUND, this)
.addObserver(NOTIFICATION_APP_ENTERED_BACKGROUND, this)
.addObserver(NOTIFICATION_CONFIGURATION_FETCH_DID_FINISH, this);
}
/** Returns true once Apptentive.register() has completed creating the singleton. */
public static boolean isApptentiveRegistered() {
return sApptentiveInternal != null;
}
/**
 * Returns true if the SDK is registered and a conversation is currently active.
 * Fix: snapshot the singleton once — the original read the static field twice, so a
 * concurrent {@link #setInstance(ApptentiveInternal) setInstance(null)} between the null
 * check and the dereference could throw a NullPointerException.
 */
public static boolean isConversationActive() {
	ApptentiveInternal instance = sApptentiveInternal;
	return instance != null && instance.getConversation() != null;
}
/**
 * Create a new or return an existing thread-safe instance of the Apptentive SDK. If this
 * or any other {@link #getInstance()} has already been called in the application's lifecycle, the
 * App key will be ignored and the current instance will be returned.
 * <p/>
 * This will be called from the application's onCreate(), before any other application objects have been
 * created. Since the time spent in this function directly impacts the performance of starting the first activity,
 * service, or receiver in the hosting app's process, the initialization of Apptentive is deferred to the first time
 * {@link #getInstance()} is called.
 *
 * @param application the context of the app that is creating the instance
 */
static void createInstance(@NonNull Application application, @NonNull ApptentiveConfiguration configuration) {
final String apptentiveKey = configuration.getApptentiveKey();
final String apptentiveSignature = configuration.getApptentiveSignature();
// set log message sanitizing
ApptentiveLog.setShouldSanitizeLogMessages(configuration.shouldSanitizeLogMessages());
// set log level before we initialize log monitor since log monitor can override it as well
ApptentiveLog.overrideLogLevel(configuration.getLogLevel());
synchronized (ApptentiveInternal.class) {
if (sApptentiveInternal == null) {
ApptentiveLog.i("Registering Apptentive Android SDK %s", Constants.getApptentiveSdkVersion());
ApptentiveLog.v("ApptentiveKey=%s ApptentiveSignature=%s", apptentiveKey, apptentiveSignature);
// resolve Android ID
boolean shouldGenerateRandomAndroidID = Build.VERSION.SDK_INT < Build.VERSION_CODES.O && !configuration.shouldCollectAndroidIdOnPreOreoTargets();
String androidID = resolveAndroidID(application.getApplicationContext(), shouldGenerateRandomAndroidID);
// Heavy initialization runs on the conversation queue, not the caller's thread.
dispatchOnConversationQueue(new DispatchTask() {
@Override
protected void execute() {
// troubleshooting mode
if (configuration.isTroubleshootingModeEnabled()) {
// initialize log writer
ApptentiveLog.initializeLogWriter(application.getApplicationContext(), LOG_HISTORY_SIZE);
// try initializing log monitor
LogMonitor.startSession(application.getApplicationContext(), apptentiveKey, apptentiveSignature);
} else {
ApptentiveLog.i(TROUBLESHOOT, "Troubleshooting is disabled in the app configuration");
}
sApptentiveInternal = new ApptentiveInternal(application, configuration, androidID);
sApptentiveInternal.start();
}
});
// Lifecycle callbacks are registered synchronously so no foreground events are missed.
ApptentiveActivityLifecycleCallbacks.register(application);
} else {
ApptentiveLog.w("Apptentive instance is already initialized");
}
}
}
/**
 * Returns the singleton SDK instance created by {@link Apptentive#register(Application)},
 * or the shared null-object instance when registration has not happened yet. Never null.
 *
 * @return the initialized instance, or {@code NULL_INSTANCE} before registration
 */
public static @NonNull ApptentiveInstance getInstance() {
	synchronized (ApptentiveInternal.class) {
		if (sApptentiveInternal == null) {
			return NULL_INSTANCE;
		}
		return sApptentiveInternal;
	}
}
@Override
public boolean isNull() {
// This is the real instance; only ApptentiveNullInstance reports true.
return false;
}
/**
 * Use this method to set or clear the internal state (pass in null)
 * Note: designed to be used for unit testing only
 *
 * @param instance the internal instance to be set to
 */
public static void setInstance(ApptentiveInternal instance) {
sApptentiveInternal = instance;
}
/**
 * Set default theme from which Apptentive UI will inherit theme attributes. Apptentive will only
 * inherit from an AppCompat theme.
 *
 * @param themeResId resource id of the theme style definition, such as R.style.MyAppTheme
 * @return true if the theme is set for inheritance successfully.
 */
private boolean setApplicationDefaultTheme(int themeResId) {
try {
if (themeResId != 0) {
// If passed theme res id does not exist, an exception would be thrown and caught
appContext.getResources().getResourceName(themeResId);
// Check if the theme to be inherited from is an AppCompat theme.
Resources.Theme appDefaultTheme = appContext.getResources().newTheme();
appDefaultTheme.applyStyle(themeResId, true);
TypedArray a = appDefaultTheme.obtainStyledAttributes(androidx.appcompat.R.styleable.AppCompatTheme);
try {
if (a.hasValue(androidx.appcompat.R.styleable.AppCompatTheme_colorPrimaryDark)) {
// Only set to use if it's an AppCompat theme. See updateApptentiveInteractionTheme() for theme inheritance chain
appDefaultAppCompatThemeId = themeResId;
return true;
}
} finally {
// TypedArrays must always be recycled, even on early return.
a.recycle();
}
}
} catch (Resources.NotFoundException e) {
ApptentiveLog.e("Theme Res id not found");
logException(e);
}
return false;
}
/**
 * Must be called after {@link ApptentiveInternal#setApplicationDefaultTheme(int)}
 *
 * @return true if the app is using an AppCompat theme
 */
public boolean isAppUsingAppCompatTheme() {
return appDefaultAppCompatThemeId != 0;
}
// Object getter methods reqiure an instance. Get an instance with ApptentiveInternal.getInstance()
public Context getApplicationContext() {
return appContext;
}
public int getApplicationVersionCode() {
return appRelease.getVersionCode();
}
public String getApplicationVersionName() {
return appRelease.getVersionName();
}
/* Get the foreground activity from the current application, i.e. at the top of the task
* It is tracked through {@link #onActivityStarted(Activity)} and {@link #onActivityStopped(Activity)}
*
* If Apptentive interaction is to be launched from a non-activity context, use the current activity at
* the top of the task stack, i.e. the foreground activity.
*/
public @Nullable Activity getCurrentTaskStackTopActivity() {
return ApptentiveActivityLifecycleCallbacks.getCurrentTopActivity();
}
public ApptentiveTaskManager getApptentiveTaskManager() {
return taskManager;
}
public ConversationManager getConversationManager() {
return conversationManager;
}
@Override
public Resources.Theme getApptentiveToolbarTheme() {
return apptentiveToolbarTheme;
}
@Override
/** @return the status bar color resolved from the interaction theme (see Step 5 of theme update). */
public int getDefaultStatusBarColor() {
return statusBarColorDefault;
}
/** @return the active conversation, or null when none is active. */
public Conversation getConversation() {
return conversationManager.getActiveConversation();
}
/** @return a proxy for the active conversation, or null when none is active. */
public @Nullable ConversationProxy getConversationProxy() {
return conversationManager.getActiveConversationProxy();
}
/** @return the Apptentive app key supplied at registration. */
public String getApptentiveKey() {
return apptentiveKey;
}
/** @return the Apptentive app signature supplied at registration. */
public String getApptentiveSignature() {
return apptentiveSignature;
}
/**
 * Returns the configured Apptentive server URL, falling back to the default
 * endpoint when no custom URL was configured.
 */
public String getServerUrl() {
	return (serverUrl != null) ? serverUrl : Constants.CONFIG_DEFAULT_SERVER_URL;
}
/** @return the host app's display label, resolved from PackageManager in {@link #start()}. */
public String getDefaultAppDisplayName() {
return defaultAppDisplayName;
}
/** @return terms and conditions to show with surveys; may be null if never configured. */
public TermsAndConditions getSurveyTermsAndConditions() {
return surveyTermsAndConditions;
}
@Override
/** Delegates interaction throttling decisions to {@code throttleUtils}. */
public boolean shouldThrottleInteraction(Interaction.Type interactionType) {
return throttleUtils.shouldThrottleInteraction(interactionType);
}
/** @return true when the host APK was built debuggable (per {@code appRelease}). */
public boolean isApptentiveDebuggable() {
return appRelease.isDebug();
}
/** @return the SDK-wide shared preferences (cleared by {@link #resetSdkState()}). */
public SharedPreferences getGlobalSharedPrefs() {
return globalSharedPrefs;
}
// TODO: remove app release from this class
/** @return the current app release metadata snapshot. */
public AppRelease getAppRelease() {
return appRelease;
}
/** @return the HTTP client used for Apptentive API requests. */
public ApptentiveHttpClient getApptentiveHttpClient() {
return apptentiveHttpClient;
}
/**
 * Handles an app-launch transition: starts a session (if not already started) and
 * engages the internal {@code app__launch} event. No-op without an active conversation.
 * Must run on the conversation queue.
 */
private void onAppLaunch(final Context appContext) {
checkConversationQueue();
if (isConversationActive()) {
Conversation conversation = getConversation();
if (!conversation.hasSession()) {
conversation.startSession();
}
engageInternal(appContext, EventPayload.EventLabel.app__launch.getLabelName());
}
}
/**
 * Handles an app-exit transition: engages the internal {@code app__exit} event, then
 * ends the session. No-op without an active conversation. Must run on the conversation queue.
 */
private void onAppExit(final Context appContext) {
checkConversationQueue();
if (isConversationActive()) {
engageInternal(appContext, EventPayload.EventLabel.app__exit.getLabelName());
getConversation().endSession();
}
}
/**
 * Called when the app enters the foreground: starts the troubleshooting log-monitor
 * session (only when the log writer is ready) and treats the transition as an app launch.
 */
private void onAppEnterForeground() {
checkConversationQueue();
// Try to initialize log monitor
if (ApptentiveLog.isLogWriterInitialized()) {
LogMonitor.startSession(appContext, apptentiveKey, apptentiveSignature);
}
onAppLaunch(getApplicationContext());
}
/** Called when the app enters the background; treats the transition as an app exit. */
private void onAppEnterBackground() {
checkConversationQueue();
onAppExit(getApplicationContext());
}
/* Apply Apptentive styling layers to the theme to be used by interaction. The layers include
 * Apptentive defaults, and app/activity theme inheritance and app specific overrides.
 *
 * When the Apptentive fragments are hosted by ApptentiveViewActivity(by default), the value of theme attributes
 * are obtained in the following order: ApptentiveTheme.Base.Versioned specified in Apptentive's AndroidManifest.xml ->
 * app default theme specified in app AndroidManifest.xml (force) -> ApptentiveThemeOverride (force)
 *
 * NOTE: the layer ORDER below is significant; each applyStyle(…, true) force-overrides the previous layer.
 *
 * @param interactionTheme The base theme to apply Apptentive styling layers
 * @param context The context that will host Apptentive interaction fragment, either ApptentiveViewActivity
 * or application context
 */
public void updateApptentiveInteractionTheme(Context context, Resources.Theme interactionTheme) {
/* Step 1: Apply Apptentive default theme layer.
 * If host context is an Activity, the base theme already has Apptentive defaults applied, so skip Step 1.
 * If host context is NOT an Activity, first apply Apptentive defaults.
 */
if (!(context instanceof Activity)) {
// If host context is not an activity, i.e. application context, treat it as initial theme setup
interactionTheme.applyStyle(R.style.ApptentiveTheme_Base_Versioned, true);
}
// Step 2: Inherit app default appcompat theme if there is one specified in app's AndroidManifest
if (appDefaultAppCompatThemeId != 0) {
interactionTheme.applyStyle(appDefaultAppCompatThemeId, true);
}
// Step 3: Restore Apptentive UI window properties that may have been overridden in Step 2. This theme
// is to ensure Apptentive interaction has a modal feel-n-look.
interactionTheme.applyStyle(R.style.ApptentiveBaseFrameTheme, true);
// Step 4: Apply optional theme override specified in host app's style
int themeOverrideResId = context.getResources().getIdentifier("ApptentiveThemeOverride",
"style", getApplicationContext().getPackageName());
if (themeOverrideResId != 0) {
interactionTheme.applyStyle(themeOverrideResId, true);
}
// Step 5: Update status bar color
/* Obtain the default status bar color. When an Apptentive Modal interaction is shown,
 * a translucent overlay would be applied on top of statusBarColorDefault
 */
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// statusBarColor attribute only exists on API 21+; fall back to transparent when unset.
int transparentColor = ContextCompat.getColor(context, android.R.color.transparent);
TypedArray a = interactionTheme.obtainStyledAttributes(new int[]{android.R.attr.statusBarColor});
try {
statusBarColorDefault = a.getColor(0, transparentColor);
} finally {
a.recycle();
}
}
// Step 6: Update toolbar overlay theme
int toolbarThemeId = Util.getResourceIdFromAttribute(interactionTheme, R.attr.apptentiveToolbarTheme);
apptentiveToolbarTheme.setTo(interactionTheme);
apptentiveToolbarTheme.applyStyle(toolbarThemeId, true);
}
/**
 * One-time SDK startup: loads the active conversation and snapshots host-application
 * package metadata (package name, default theme, display label). Also rejects the
 * removed pre-2.0 NetworkStateReceiver if it is still declared in the manifest.
 *
 * @return false only when reading application/package info threw; true otherwise
 *         (even when no conversation could be loaded — the SDK just stays disabled).
 */
private boolean start() {
	checkConversationQueue();
	boolean bRet = true;
	/* If Message Center feature has never been used before, don't initialize message polling thread.
	 * Message Center feature will be seen as used, if one of the following conditions has been met:
	 * 1. Message Center has been opened for the first time
	 * 2. The first Push is received which would open Message Center
	 * 3. An unreadMessageCountListener() is set up
	 */
	boolean conversationLoaded = conversationManager.loadActiveConversation(getApplicationContext());
	if (!conversationLoaded) {
		ApptentiveLog.w(CONVERSATION, "There is no active conversation. The SDK will be disabled until a conversation becomes active.");
	}
	apptentiveToolbarTheme = appContext.getResources().newTheme();
	try {
		appPackageName = appContext.getPackageName();
		PackageManager packageManager = appContext.getPackageManager();
		PackageInfo packageInfo = packageManager.getPackageInfo(appPackageName, PackageManager.GET_META_DATA | PackageManager.GET_RECEIVERS);
		ApplicationInfo ai = packageInfo.applicationInfo;
		// Used for application theme inheritance if the theme is an AppCompat theme.
		setApplicationDefaultTheme(ai.theme);
		defaultAppDisplayName = packageManager.getApplicationLabel(packageManager.getApplicationInfo(packageInfo.packageName, 0)).toString();
		// Prevent delayed run-time exception if the app upgrades from pre-2.0 and doesn't remove NetworkStateReceiver from manifest
		ActivityInfo[] registered = packageInfo.receivers;
		if (registered != null) {
			for (ActivityInfo activityInfo : registered) {
				// Throw assertion error when relict class found in manifest.
				if (activityInfo.name.equals("com.apptentive.android.sdk.comm.NetworkStateReceiver")) {
					throw new AssertionError("NetworkStateReceiver has been removed from Apptentive SDK, please make sure it's also removed from manifest file"); // TODO: should be IllegalStateException or similar
				}
			}
		}
		// Try initialize Apptimize SDK support
		tryInitializeApptimizeSDK();
	} catch (Exception e) {
		ApptentiveLog.e(e, "Unexpected error while reading application or package info.");
		logException(e);
		bRet = false;
	}
	// Fixed: second label previously read "Apptentive Key" but the value printed is the signature.
	ApptentiveLog.v("Application Info:\n\tApptentive Key: %s\n\tApptentive Signature: %s\n\tDebuggable APK: %b\n\tDefault locale: %s", apptentiveKey, apptentiveSignature, appRelease.isDebug(), Locale.getDefault());
	return bRet;
}
/**
 * Compares the current app version (code + name) and SDK version against what this
 * conversation last saw. On a change it records the new version in the version history,
 * enqueues an updated AppRelease payload, and invalidates cached configuration so the
 * server configuration is re-fetched.
 *
 * @param conversation the conversation to check; logs and returns when null
 */
private void checkSendVersionChanges(Conversation conversation) {
if (conversation == null) {
ApptentiveLog.e(CONVERSATION, "Can't check session data changes: session data is not initialized");
return;
}
boolean appReleaseChanged = false;
boolean sdkChanged = false;
final VersionHistoryItem lastVersionItemSeen = conversation.getVersionHistory().getLastVersionSeen();
final int currentVersionCode = appRelease.getVersionCode();
final String currentVersionName = appRelease.getVersionName();
Integer previousVersionCode = null;
String previousVersionName = null;
if (lastVersionItemSeen == null) {
// First run for this conversation — treat as a version change so history gets seeded.
appReleaseChanged = true;
} else {
previousVersionCode = lastVersionItemSeen.getVersionCode();
Apptentive.Version lastSeenVersionNameVersion = new Apptentive.Version();
previousVersionName = lastVersionItemSeen.getVersionName();
lastSeenVersionNameVersion.setVersion(previousVersionName);
// Changed when either the version code or the (normalized) version name differs.
if (!(ObjectUtils.equal(currentVersionCode, previousVersionCode)) || !currentVersionName.equals(lastSeenVersionNameVersion.getVersion())) {
appReleaseChanged = true;
}
}
// TODO: Move this into a session became active handler.
final String lastSeenSdkVersion = conversation.getLastSeenSdkVersion();
final String currentSdkVersion = Constants.getApptentiveSdkVersion();
if (!StringUtils.equal(lastSeenSdkVersion, currentSdkVersion)) {
sdkChanged = true;
}
if (appReleaseChanged) {
ApptentiveLog.i(CONVERSATION, "Application version was changed: Name: %s => %s, Code: %d => %d", previousVersionName, currentVersionName, previousVersionCode, currentVersionCode);
conversation.getVersionHistory().updateVersionHistory(Util.currentTimeSeconds(), currentVersionCode, currentVersionName);
}
Sdk sdk = SdkManager.generateCurrentSdk(appContext);
if (sdkChanged) {
ApptentiveLog.i(CONVERSATION, "SDK version was changed: %s => %s", lastSeenSdkVersion, currentSdkVersion);
conversation.setLastSeenSdkVersion(currentSdkVersion);
conversation.setSdk(sdk);
}
if (appReleaseChanged || sdkChanged) {
// A single payload covers both kinds of change; caches must be invalidated either way.
conversation.addPayload(AppReleaseManager.getPayload(sdk, appRelease));
conversation.setAppRelease(appRelease);
conversation.setSdk(sdk);
invalidateCaches(conversation);
}
}
/**
 * We want to make sure the app is using the latest configuration from the server if the app or sdk version changes.
 * Expires both the interaction cache and the configuration cache immediately.
 */
private void invalidateCaches(Conversation conversation) {
checkConversationQueue();
// 0L = already expired, forcing a re-fetch of interactions.
conversation.setInteractionExpiration(0L);
Configuration config = Configuration.load();
// Setting expiration to "now" marks the configuration cache stale.
config.setConfigurationCacheExpirationMillis(System.currentTimeMillis());
config.save();
}
/**
 * Returns the rating provider, lazily defaulting to the Google Play provider
 * when none was set via {@link #setRatingProvider}.
 */
public IRatingProvider getRatingProvider() {
	if (ratingProvider != null) {
		return ratingProvider;
	}
	ratingProvider = new GooglePlayRatingProvider();
	return ratingProvider;
}
@Override
/** Replaces the rating provider used by rating interactions. */
public void setRatingProvider(@NonNull IRatingProvider ratingProvider) {
this.ratingProvider = ratingProvider;
}
/** @return extra arguments for the rating provider; may be null if none were set. */
public Map<String, String> getRatingProviderArgs() {
return ratingProviderArgs;
}
@Override
/** Adds a single rating-provider argument, lazily creating the backing map. */
public void putRatingProviderArg(@NonNull String key, String value) {
if (ratingProviderArgs == null) {
ratingProviderArgs = new HashMap<>();
}
ratingProviderArgs.put(key, value);
}
@Override
/**
 * Stores the survey-finished listener behind a WeakReference so the SDK never
 * keeps the host's listener (often an Activity) alive. Passing null clears it.
 */
public void setOnSurveyFinishedListener(OnSurveyFinishedListener onSurveyFinishedListener) {
	this.onSurveyFinishedListener = (onSurveyFinishedListener == null)
		? null
		: new WeakReference<>(onSurveyFinishedListener);
}
@Override
/** @return the registered survey-finished listener, or null when unset or already collected. */
public OnSurveyFinishedListener getOnSurveyFinishedListener() {
	if (onSurveyFinishedListener == null) {
		return null;
	}
	// May still return null if the weakly-held listener was garbage collected.
	return onSurveyFinishedListener.get();
}
@Override
/** Registers a listener notified after each interaction fetch (see notifyInteractionUpdated). */
public void addInteractionUpdateListener(InteractionManager.InteractionUpdateListener listener) {
interactionUpdateListeners.add(listener);
}
@Override
/** Unregisters a previously added interaction-update listener. */
public void removeInteractionUpdateListener(InteractionManager.InteractionUpdateListener listener) {
interactionUpdateListeners.remove(listener);
}
/** Stores the auth-failure listener weakly so the SDK doesn't keep the host object alive. */
public void setAuthenticationFailedListener(Apptentive.AuthenticationFailedListener listener) {
authenticationFailedListenerRef = new WeakReference<>(listener);
}
/**
 * Notifies the host's authentication-failure listener, but only when the failed request
 * belongs to the currently active conversation. The callback itself is dispatched on the
 * main queue; this method must run on the conversation queue.
 */
public void notifyAuthenticationFailedListener(final Apptentive.AuthenticationFailedReason reason, String conversationIdOfFailedRequest) {
	checkConversationQueue();
	if (!isConversationActive()) {
		return;
	}
	String activeConversationId = getConversation().getConversationId();
	if (!StringUtils.equal(activeConversationId, conversationIdOfFailedRequest)) {
		// Failure is for a stale/other conversation — ignore.
		return;
	}
	final Apptentive.AuthenticationFailedListener listener =
		(authenticationFailedListenerRef != null) ? authenticationFailedListenerRef.get() : null;
	if (listener == null) {
		return;
	}
	// we need to dispatch listener on the main queue
	DispatchQueue.mainQueue().dispatchAsync(new DispatchTask() {
		@Override
		protected void execute() {
			listener.onAuthenticationFailed(reason);
		}
	});
}
/**
 * The key that is used to store extra data on an Apptentive push notification.
 */
static final String APPTENTIVE_PUSH_EXTRA_KEY = "apptentive";
// Extras key Parse pushes use to carry their JSON payload.
static final String PUSH_EXTRA_KEY_PARSE = "com.parse.Data";
// Extras key Urban Airship pushes use to carry their payload bundle.
static final String PUSH_EXTRA_KEY_UA = "com.urbanairship.push.EXTRA_PUSH_MESSAGE_BUNDLE";
// Title/body keys for plain FCM/GCM/SNS payloads.
static final String TITLE_DEFAULT = "title";
static final String BODY_DEFAULT = "body";
// Body key used by Parse payloads.
static final String BODY_PARSE = "alert";
// Body key used by Urban Airship payloads.
static final String BODY_UA = "com.urbanairship.push.ALERT";
/**
 * Extracts the Apptentive payload from a push Intent by delegating to the
 * Bundle overload. Returns null for a null Intent.
 */
static String getApptentivePushNotificationData(Intent intent) {
	if (intent == null) {
		return null;
	}
	ApptentiveLog.v(PUSH, "Got an Intent.");
	return getApptentivePushNotificationData(intent.getExtras());
}
/**
 * <p>Internal use only.</p>
 * This bundle could be any bundle sent to us by a push Intent from any supported platform. For that reason, it needs to be checked in multiple ways.
 * Supported platforms: Parse (JSON string extra), Urban Airship (nested bundle),
 * and plain FCM/GCM/SNS (direct string extra). Returns null on any parse failure.
 *
 * @param pushBundle a Bundle, or null.
 * @return a String, or null.
 */
static String getApptentivePushNotificationData(Bundle pushBundle) {
if (pushBundle != null) {
if (pushBundle.containsKey(PUSH_EXTRA_KEY_PARSE)) { // Parse
ApptentiveLog.v(PUSH, "Got a Parse Push.");
String parseDataString = pushBundle.getString(PUSH_EXTRA_KEY_PARSE);
if (parseDataString == null) {
ApptentiveLog.e(PUSH, "com.parse.Data is null.");
return null;
}
try {
// Parse embeds the Apptentive payload as a field inside its JSON data string.
JSONObject parseJson = new JSONObject(parseDataString);
return parseJson.optString(APPTENTIVE_PUSH_EXTRA_KEY, null);
} catch (JSONException e) {
ApptentiveLog.e(PUSH, "com.parse.Data is corrupt: %s", parseDataString);
logException(e);
return null;
}
} else if (pushBundle.containsKey(PUSH_EXTRA_KEY_UA)) { // Urban Airship
ApptentiveLog.v(PUSH, "Got an Urban Airship push.");
// Urban Airship nests its payload one bundle deep.
Bundle uaPushBundle = pushBundle.getBundle(PUSH_EXTRA_KEY_UA);
if (uaPushBundle == null) {
ApptentiveLog.e(PUSH, "Urban Airship push extras bundle is null");
return null;
}
return uaPushBundle.getString(APPTENTIVE_PUSH_EXTRA_KEY);
} else if (pushBundle.containsKey(APPTENTIVE_PUSH_EXTRA_KEY)) { // All others
// Straight FCM / GCM / SNS, or nested
ApptentiveLog.v(PUSH, "Found apptentive push data.");
return pushBundle.getString(APPTENTIVE_PUSH_EXTRA_KEY);
} else {
ApptentiveLog.e(PUSH, "Got an unrecognizable push.");
}
}
ApptentiveLog.e(PUSH, "Push bundle was null.");
return null;
}
/**
 * Extracts the Apptentive payload from an FCM-style data map.
 * Returns null for a null map or when the key is absent.
 */
static String getApptentivePushNotificationData(Map<String, String> pushData) {
	return (pushData == null) ? null : pushData.get(APPTENTIVE_PUSH_EXTRA_KEY);
}
/**
 * Launches the About screen hosted in {@code ApptentiveViewActivity}.
 *
 * @param context          any context; when it is not an Activity, NEW_TASK flags are added
 *                         so startActivity is legal from a non-activity context
 * @param showBrandingBand whether to show the Apptentive branding band
 */
public void showAboutInternal(Context context, boolean showBrandingBand) {
Intent intent = new Intent();
intent.setClass(context, ApptentiveViewActivity.class);
intent.putExtra(Constants.FragmentConfigKeys.TYPE, Constants.FragmentTypes.ABOUT);
intent.putExtra(Constants.FragmentConfigKeys.EXTRA, showBrandingBand);
if (!(context instanceof Activity)) {
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_MULTIPLE_TASK);
}
context.startActivity(intent);
}
/**
 * TODO: Decouple this from Conversation and Message Manager so it can be unit tested.
 *
 * Builds a PendingIntent for an Apptentive push payload. Returns null when the payload
 * is empty/corrupt, targets a different conversation, or carries an unknown action.
 * Currently the only supported action is {@code pmc} (open Message Center).
 */
static PendingIntent generatePendingIntentFromApptentivePushData(Conversation conversation, String apptentivePushData) {
ApptentiveLog.d(PUSH, "Generating Apptentive push PendingIntent.");
if (!TextUtils.isEmpty(apptentivePushData)) {
try {
JSONObject pushJson = new JSONObject(apptentivePushData);
// we need to check if current user is actually the receiver of this notification
final String conversationId = pushJson.optString(PUSH_CONVERSATION_ID, null);
if (conversationId != null) {
// is it an actual receiver?
if (!StringUtils.equal(conversation.getConversationId(), conversationId)) {
ApptentiveLog.i(PUSH, "Can't generate pending intent from Apptentive push data: push conversation id doesn't match active conversation");
return null;
}
}
ApptentiveInternal.PushAction action = ApptentiveInternal.PushAction.unknown;
if (pushJson.has(ApptentiveInternal.PUSH_ACTION)) {
action = ApptentiveInternal.PushAction.parse(pushJson.getString(ApptentiveInternal.PUSH_ACTION));
}
switch (action) {
case pmc: {
// Prefetch message when push for message center is received
MessageManager mgr = conversation.getMessageManager();
if (mgr != null) {
mgr.startMessagePreFetchTask();
}
// Construct a pending intent to launch message center
return ApptentiveInternal.prepareMessageCenterPendingIntent(ApptentiveInternal.getInstance().getApplicationContext(), conversation);
}
default:
ApptentiveLog.w(PUSH, "Unknown Apptentive push notification action: \"%s\"", action.name());
}
} catch (Exception e) {
ApptentiveLog.e(PUSH, e, "Error parsing JSON from push notification.");
logException(e);
}
}
return null;
}
/**
 * Attempts to show Message Center, attaching optional custom data to the next message.
 * Invalid custom-data values (anything other than String/Boolean/Long/Double/Float/
 * Integer/Short) are removed from the map in place with a warning. When Message Center
 * cannot be shown, a fallback error screen is launched instead.
 *
 * @param context    context used to launch the interaction
 * @param customData optional custom data; MUTATED in place to drop invalid entries
 * @return true when the Message Center interaction was actually shown
 */
@Override
public boolean showMessageCenterInternal(@NonNull Context context, Map<String, Object> customData) {
	boolean interactionShown = false;
	if (canShowMessageCenterInternal()) {
		if (customData != null) {
			// Iterate entries (typed, no per-key lookup) and remove invalid values via the iterator.
			Iterator<Map.Entry<String, Object>> entries = customData.entrySet().iterator();
			while (entries.hasNext()) {
				Object value = entries.next().getValue();
				if (value != null) {
					if (!(value instanceof String ||
						value instanceof Boolean ||
						value instanceof Long ||
						value instanceof Double ||
						value instanceof Float ||
						value instanceof Integer ||
						value instanceof Short)) {
						ApptentiveLog.w(MESSAGES, "Removing invalid customData type: %s", value.getClass().getSimpleName());
						entries.remove();
					}
				}
			}
		}
		this.customData = customData;
		interactionShown = engageInternal(context, MessageCenterInteraction.DEFAULT_INTERNAL_EVENT_NAME);
		if (!interactionShown) {
			// Engagement failed — don't leave stale custom data for a later interaction.
			this.customData = null;
		}
	} else {
		showMessageCenterFallback(context);
	}
	return interactionShown;
}
/** Shows the "Message Center unavailable" error screen. */
public void showMessageCenterFallback(Context context) {
EngagementModule.launchMessageCenterErrorActivity(context);
}
// TODO: remove this method
/** Convenience overload using the active conversation; false when none is active. */
private boolean canShowMessageCenterInternal() {
Conversation conversation = getConversation();
return conversation != null && canShowMessageCenterInternal(conversation);
}
/** @return true when the Message Center interaction can currently be shown for this conversation. */
public static boolean canShowMessageCenterInternal(Conversation conversation) {
return EngagementModule.canShowInteraction(conversation, "app", MessageCenterInteraction.DEFAULT_INTERNAL_EVENT_NAME, "com.apptentive");
}
/** Returns the pending message custom data and clears it (one-shot consumption). */
public Map<String, Object> getAndClearCustomData() {
Map<String, Object> customData = this.customData;
this.customData = null;
return customData;
}
/** Wipes all SDK-wide preferences and resets persisted task state. Destructive. */
public void resetSdkState() {
globalSharedPrefs.edit().clear().apply();
taskManager.reset(appContext);
}
/**
 * Broadcasts the result of an interaction fetch: posts the
 * NOTIFICATION_INTERACTIONS_DID_FETCH notification, then invokes every registered
 * {@link InteractionManager.InteractionUpdateListener}. Must run on the conversation queue.
 *
 * @param successful whether the interaction fetch succeeded
 */
public void notifyInteractionUpdated(boolean successful) {
	checkConversationQueue();
	ApptentiveNotificationCenter.defaultCenter()
		.postNotification(NOTIFICATION_INTERACTIONS_DID_FETCH,
			NOTIFICATION_KEY_SUCCESSFUL, successful);
	// Typed enhanced-for replaces the previous raw Iterator + unchecked cast.
	for (InteractionManager.InteractionUpdateListener listener : interactionUpdateListeners) {
		if (listener != null) {
			listener.onInteractionUpdated(successful);
		}
	}
}
/**
 * Builds a one-shot PendingIntent that opens Message Center when it can be shown,
 * or the Message Center error screen otherwise. Returns null only when the error
 * intent could not be generated.
 */
public static PendingIntent prepareMessageCenterPendingIntent(Context context, Conversation conversation) {
Intent intent;
if (canShowMessageCenterInternal(conversation)) {
intent = new Intent();
intent.setClass(context, ApptentiveViewActivity.class);
intent.putExtra(Constants.FragmentConfigKeys.TYPE, Constants.FragmentTypes.ENGAGE_INTERNAL_EVENT);
intent.putExtra(Constants.FragmentConfigKeys.EXTRA, MessageCenterInteraction.DEFAULT_INTERNAL_EVENT_NAME);
} else {
intent = MessageCenterInteraction.generateMessageCenterErrorIntent(context);
}
// ONE_SHOT: the notification intent is consumed once; UPDATE_CURRENT refreshes extras.
return (intent != null) ? PendingIntent.getActivity(context, 0, intent,
PendingIntent.FLAG_ONE_SHOT | PendingIntent.FLAG_UPDATE_CURRENT) : null;
}
/**
 * Checks to see if Apptentive was properly registered, and logs a message if not.
 *
 * @return true if properly registered, else false.
 */
static boolean checkRegistered() {
	if (ApptentiveInternal.isApptentiveRegistered()) {
		return true;
	}
	ApptentiveLog.e(CONVERSATION, "Apptentive SDK is not initialized.");
	return false;
}
//region Helpers
/**
 * Returns the persisted server endpoint, writing the default URL into prefs on
 * first access so later reads are stable.
 */
private String getEndpointBase(SharedPreferences prefs) {
String url = prefs.getString(Constants.PREF_KEY_SERVER_URL, null);
if (url == null) {
url = Constants.CONFIG_DEFAULT_SERVER_URL;
prefs.edit().putString(Constants.PREF_KEY_SERVER_URL, url).apply();
}
return url;
}
//endregion
//region Login/Logout
/**
 * Flag indicating if login request is currently active (used to avoid multiple competing
 * requests). Guarded by {@link #loginMutex}.
 */
private boolean loginInProgress;
/**
 * Mutex object for synchronizing login request flag
 */
private final Object loginMutex = new Object();
@Override
/**
 * Starts a login request. Only one login may be in flight at a time; concurrent calls
 * fail fast via the callback. The supplied callback is wrapped so that the in-progress
 * flag is always cleared (in finally blocks) and, on success, the internal "login"
 * event is engaged before the caller's callback runs.
 *
 * @param token    JWT used to authenticate the conversation
 * @param callback optional; invoked with success or failure
 */
public void login(String token, final LoginCallback callback) {
synchronized (loginMutex) {
if (loginInProgress) {
if (callback != null) {
callback.onLoginFail("Another login request is currently in progress");
}
return;
}
loginInProgress = true;
LoginCallback wrapperCallback = new LoginCallback() {
@Override
public void onLoginFinish() {
synchronized (loginMutex) {
assertTrue(loginInProgress);
try {
engageInternal(getApplicationContext(), "login");
if (callback != null) {
callback.onLoginFinish();
}
} finally {
// Always release the in-progress flag, even if the caller's callback throws.
loginInProgress = false;
}
}
}
@Override
public void onLoginFail(String errorMessage) {
synchronized (loginMutex) {
assertTrue(loginInProgress);
try {
if (callback != null) {
callback.onLoginFail(errorMessage);
}
} finally {
loginInProgress = false;
}
}
}
};
conversationManager.login(token, wrapperCallback);
}
}
@Override
/** Logs out the active conversation (delegated to the conversation manager). */
public void logout() {
conversationManager.logout();
}
//endregion
//region Apptimize SDK
/** Best-effort Apptimize integration setup; currently just pulls data once. */
private void tryInitializeApptimizeSDK() {
checkConversationQueue();
// TODO: figure out how to register a listener
tryUpdateApptimizeData();
}
/**
 * Copies Apptimize experiment enrollment/participation info into the active
 * conversation's device custom data. Silently returns (with a warning) when data
 * collection is disabled, there is no active conversation, the Apptimize SDK is not
 * integrated or unsupported, or there are no experiments. Must run on the conversation queue.
 */
void tryUpdateApptimizeData() {
	checkConversationQueue();
	Configuration configuration = Configuration.load();
	if (!configuration.isCollectingApptimizeData()) {
		return;
	}
	Conversation conversation = getConversation();
	if (conversation == null) {
		ApptentiveLog.w(PARTNERS, "Unable to update Apptimize data: no active conversation");
		return;
	}
	if (!ApptentiveApptimize.isApptimizeSDKAvailable()) {
		// Fixed: message previously said "initialize Apptimize SDK support" (copy-paste
		// from tryInitializeApptimizeSDK); this method updates data.
		ApptentiveLog.w(PARTNERS, "Unable to update Apptimize data: SDK integration not found");
		return;
	}
	if (!ApptentiveApptimize.isSupportedLibraryVersion()) {
		ApptentiveLog.w(PARTNERS, "Unable to update Apptimize data: unsupported library version '%s'", ApptentiveApptimize.getLibraryVersion());
		return;
	}
	Map<String, ApptentiveApptimizeTestInfo> experiments = ApptentiveApptimize.getTestInfo();
	if (experiments == null || experiments.isEmpty()) {
		ApptentiveLog.w(PARTNERS, "Unable to update Apptimize data: no experiments");
		return;
	}
	for (ApptentiveApptimizeTestInfo experiment : experiments.values()) {
		if (experiment == null) {
			continue;
		}
		String testName = experiment.getTestName();
		String variantName = experiment.getEnrolledVariantName();
		String participationState = experiment.userHasParticipated() ? "participated" : "enrolled";
		// Key encodes both the experiment and whether the user has actually participated.
		String key = StringUtils.format("Apptimize: %s %s", testName, participationState);
		conversation.getDevice().getCustomData().put(key, variantName);
	}
}
//endregion
/**
 * Dismisses any currently-visible interactions. This method is for internal use and is subject to change.
 * Re-dispatches itself onto the conversation queue when called from another thread.
 */
public static void dismissAllInteractions() {
if (!isConversationQueue()) {
// Hop onto the conversation queue, then re-enter this method there.
dispatchOnConversationQueue(new DispatchTask() {
@Override
protected void execute() {
dismissAllInteractions();
}
});
return;
}
ApptentiveNotificationCenter.defaultCenter().postNotification(NOTIFICATION_INTERACTIONS_SHOULD_DISMISS);
}
@Override
/**
 * Central dispatcher for internal SDK notifications: conversation state changes,
 * logout, auth failures, manifest fetches, foreground/background transitions, and
 * configuration fetches. Must run on the conversation queue.
 */
public void onReceiveNotification(ApptentiveNotification notification) {
checkConversationQueue();
if (notification.hasName(NOTIFICATION_CONVERSATION_STATE_DID_CHANGE)) {
Conversation conversation = notification.getRequiredUserInfo(NOTIFICATION_KEY_CONVERSATION, Conversation.class);
if (conversation.hasActiveState()) {
// if conversation was just created - start a new session
if (!conversation.hasSession()) {
conversation.startSession();
}
checkSendVersionChanges(conversation);
}
} else if (notification.hasName(NOTIFICATION_CONVERSATION_WILL_LOGOUT)) {
Conversation conversation = notification.getRequiredUserInfo(NOTIFICATION_KEY_CONVERSATION, Conversation.class);
conversation.addPayload(new LogoutPayload());
} else if (notification.hasName(NOTIFICATION_AUTHENTICATION_FAILED)) {
String conversationIdOfFailedRequest = notification.getUserInfo(NOTIFICATION_KEY_CONVERSATION_ID, String.class);
Apptentive.AuthenticationFailedReason authenticationFailedReason = notification.getUserInfo(NOTIFICATION_KEY_AUTHENTICATION_FAILED_REASON, Apptentive.AuthenticationFailedReason.class);
notifyAuthenticationFailedListener(authenticationFailedReason, conversationIdOfFailedRequest);
} else if (notification.hasName(NOTIFICATION_INTERACTION_MANIFEST_FETCHED)) {
String manifest = notification.getRequiredUserInfo(NOTIFICATION_KEY_MANIFEST, String.class);
// Persisted for troubleshooting/log-collection purposes.
storeManifestResponse(appContext, manifest);
} else if (notification.hasName(NOTIFICATION_APP_ENTERED_FOREGROUND)) {
onAppEnterForeground();
} else if (notification.hasName(NOTIFICATION_APP_ENTERED_BACKGROUND)) {
onAppEnterBackground();
} else if (notification.hasName(NOTIFICATION_CONFIGURATION_FETCH_DID_FINISH)) {
Configuration configuration = notification.getUserInfo(NOTIFICATION_KEY_CONFIGURATION, Configuration.class);
if (configuration == null) {
return;
}
if (configuration.isCollectingApptimizeData()) {
tryUpdateApptimizeData();
}
}
}
//region Engagement
/**
 * Engages an internal (SDK-reserved) event on the active conversation.
 * Returns false (after an assertion log) when there is no active conversation.
 */
private boolean engageInternal(Context context, String eventName) {
Conversation conversation = getConversation();
assertNotNull(conversation, "Attempted to engage '%s' internal event without an active conversation", eventName);
return conversation != null && EngagementModule.engageInternal(context, conversation, eventName);
}
//endregion
//region Engagement Manifest Data
/**
 * Persists the raw engagement manifest to the logs directory for troubleshooting.
 * Failures are logged and reported, never thrown.
 */
private void storeManifestResponse(Context context, String manifest) {
try {
File file = new File(ApptentiveLog.getLogsDirectory(context), Constants.FILE_APPTENTIVE_ENGAGEMENT_MANIFEST);
Util.writeText(file, manifest);
} catch (Exception e) {
ApptentiveLog.e(CONVERSATION, e, "Exception while trying to save engagement manifest data");
logException(e);
}
}
//endregion
//region Error Reporting
/** Forwards an exception to the error-metrics reporter. */
private static void logException(Exception e) {
ErrorMetrics.logException(e); // TODO: add more context info
}
//endregion
//region Android ID
// Prefs file/key used to persist a locally generated random Android ID.
private static final String PREFS_NAME_ANDROID_ID = "com.apptentive.sdk.androidID";
private static final String PREFS_KEY_NAME_ANDROID_ID = "androidID";
/**
 * Resolves the Android ID to report: when random-ID mode is enabled, returns a
 * persisted randomly generated ID (creating it on first use) instead of the real
 * device Settings.Secure ANDROID_ID.
 */
private static String resolveAndroidID(Context context, boolean shouldGenerateRandomAndroidID) {
if (shouldGenerateRandomAndroidID) {
String existingAndroidID = loadAndroidID(context);
if (existingAndroidID != null) {
return existingAndroidID;
}
// First run in random mode: generate once and persist so the ID stays stable.
String androidID = StringUtils.randomAndroidID();
saveAndroidID(context, androidID);
return androidID;
}
return Util.getAndroidID(context);
}
/** Reads the persisted random Android ID, or null when none was generated yet. */
private static String loadAndroidID(Context context) {
SharedPreferences sharedPreferences = context.getSharedPreferences(PREFS_NAME_ANDROID_ID, Context.MODE_PRIVATE);
return sharedPreferences.getString(PREFS_KEY_NAME_ANDROID_ID, null);
}
/** Persists the generated random Android ID so it survives process restarts. */
private static void saveAndroidID(Context context, String androidID) {
SharedPreferences sharedPreferences = context.getSharedPreferences(PREFS_NAME_ANDROID_ID, Context.MODE_PRIVATE);
sharedPreferences.edit().putString(PREFS_KEY_NAME_ANDROID_ID, androidID).apply();
}
//endregion
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.shared.kerberos.codec;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.fail;
import java.nio.ByteBuffer;
import org.apache.directory.api.asn1.DecoderException;
import org.apache.directory.api.asn1.EncoderException;
import org.apache.directory.api.asn1.ber.Asn1Container;
import org.apache.directory.api.asn1.ber.Asn1Decoder;
import org.apache.directory.api.util.Strings;
import org.apache.directory.shared.kerberos.KerberosTime;
import org.apache.directory.shared.kerberos.codec.kdcReqBody.KdcReqBodyContainer;
import org.apache.directory.shared.kerberos.codec.options.KdcOptions;
import org.apache.directory.shared.kerberos.codec.types.EncryptionType;
import org.apache.directory.shared.kerberos.codec.types.HostAddrType;
import org.apache.directory.shared.kerberos.codec.types.PrincipalNameType;
import org.apache.directory.shared.kerberos.components.EncryptedData;
import org.apache.directory.shared.kerberos.components.HostAddress;
import org.apache.directory.shared.kerberos.components.HostAddresses;
import org.apache.directory.shared.kerberos.components.KdcReqBody;
import org.apache.directory.shared.kerberos.components.PrincipalName;
import org.apache.directory.shared.kerberos.messages.Ticket;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
/**
* Test the decoder for a KdcReqBody
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class KdcReqBodyDecoderTest
{
/**
* Test the decoding of a KdcReqBody message
*/
@Test
public void testDecodeFullKdcReqBody() throws Exception
{
ByteBuffer stream = ByteBuffer.allocate( 0x15B );
stream.put( new byte[]
{
0x30, ( byte ) 0x82, 0x01, 0x57,
( byte ) 0xA0, 0x07,
0x03, 0x05,
0x00, 0x01, 0x04, 0x00, 0x32,
( byte ) 0xA1, 0x13,
0x30, 0x11,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x0A,
( byte ) 0xA1, 0x0A,
0x30, 0x08,
0x1B, 0x06,
'c', 'l', 'i', 'e', 'n', 't',
( byte ) 0xA2, 0x0D,
0x1B, 0x0B,
'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
( byte ) 0xA3, 0x13,
0x30, 0x11,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x0A,
( byte ) 0xA1, 0x0A,
0x30, 0x08,
0x1B, 0x06,
's', 'e', 'r', 'v', 'e', 'r',
( byte ) 0xA4, 0x11,
0x18, 0x0F,
'2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
( byte ) 0xA5, 0x11,
0x18, 0x0F,
'2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
( byte ) 0xA6, 0x11,
0x18, 0x0F,
'2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
( byte ) 0xA7, 0x04,
0x02, 0x02,
0x30, 0x39,
( byte ) 0xA8, 0x0B,
0x30, 0x09,
0x02, 0x01, 0x06,
0x02, 0x01, 0x11,
0x02, 0x01, 0x12,
( byte ) 0xA9, 0x2E,
0x30, 0x2C,
0x30, 0x14,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x02,
( byte ) 0xA1, 0x0D,
0x04, 0x0B,
'1', '9', '2', '.', '1', '6', '8', '.', '0', '.', '1',
0x30, 0x14,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x02,
( byte ) 0xA1, 0x0D,
0x04, 0x0B,
'1', '9', '2', '.', '1', '6', '8', '.', '0', '.', '2',
( byte ) 0xAA, 0x11,
0x30, 0x0F,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x11,
( byte ) 0xA2, 0x08,
0x04, 0x06,
'a', 'b', 'c', 'd', 'e', 'f',
( byte ) 0xAB, ( byte ) 0x81, ( byte ) 0x83,
0x30, ( byte ) 0x81, ( byte ) 0x80,
0x61, 0x3E,
0x30, 0x3C,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x05,
( byte ) 0xA1, 0x0D,
0x1B, 0x0B,
'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
( byte ) 0xA2, 0x13,
0x30, 0x11,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x01,
( byte ) 0xA1, 0x0A,
0x30, 0x08,
0x1B, 0x06,
'c', 'l', 'i', 'e', 'n', 't',
( byte ) 0xA3, 0x11,
0x30, 0x0F,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x11,
( byte ) 0xA2, 0x08,
0x04, 0x06,
'a', 'b', 'c', 'd', 'e', 'f',
0x61, 0x3E,
0x30, 0x3C,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x05,
( byte ) 0xA1, 0x0D,
0x1B, 0x0B,
'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
( byte ) 0xA2, 0x13,
0x30, 0x11,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x01,
( byte ) 0xA1, 0x0A,
0x30, 0x08,
0x1B, 0x06,
's', 'e', 'r', 'v', 'e', 'r',
( byte ) 0xA3, 0x11,
0x30, 0x0F,
( byte ) 0xA0, 0x03,
0x02, 0x01, 0x11,
( byte ) 0xA2, 0x08,
0x04, 0x06,
'a', 'b', 'c', 'd', 'e', 'f'
} );
stream.flip();
// Allocate a KdcReqBody Container
Asn1Container kdcReqBodyContainer = new KdcReqBodyContainer( stream );
// Decode the KdcReqBody PDU
try
{
Asn1Decoder.decode( stream, kdcReqBodyContainer );
}
catch ( DecoderException de )
{
fail( de.getMessage() );
}
KdcReqBody body = new KdcReqBody();
body.setKdcOptions( new KdcOptions( new byte[]
{ 0x00, 0x01, 0x04, 0x00, 0x32 } ) );
body.setCName( new PrincipalName( "client", PrincipalNameType.KRB_NT_ENTERPRISE ) );
body.setRealm( "EXAMPLE.COM" );
body.setSName( new PrincipalName( "server", PrincipalNameType.KRB_NT_ENTERPRISE ) );
body.setFrom( new KerberosTime( System.currentTimeMillis() ) );
body.setTill( new KerberosTime( System.currentTimeMillis() ) );
body.setRtime( new KerberosTime( System.currentTimeMillis() ) );
body.setNonce( 12345 );
body.addEType( EncryptionType.AES256_CTS_HMAC_SHA1_96 );
body.addEType( EncryptionType.DES3_CBC_MD5 );
body.addEType( EncryptionType.AES128_CTS_HMAC_SHA1_96 );
HostAddresses addresses = new HostAddresses();
addresses.addHostAddress(
new HostAddress( HostAddrType.ADDRTYPE_INET, Strings.getBytesUtf8( "192.168.0.1" ) ) );
addresses.addHostAddress(
new HostAddress( HostAddrType.ADDRTYPE_INET, Strings.getBytesUtf8( "192.168.0.2" ) ) );
body.setAddresses( addresses );
EncryptedData encAuthorizationData = new EncryptedData( EncryptionType.AES128_CTS_HMAC_SHA1_96,
Strings.getBytesUtf8( "abcdef" ) );
body.setEncAuthorizationData( encAuthorizationData );
Ticket ticket1 = new Ticket();
ticket1.setTktVno( 5 );
ticket1.setRealm( "EXAMPLE.COM" );
ticket1.setSName( new PrincipalName( "client", PrincipalNameType.KRB_NT_PRINCIPAL ) );
ticket1.setEncPart(
new EncryptedData( EncryptionType.AES128_CTS_HMAC_SHA1_96, Strings.getBytesUtf8( "abcdef" ) ) );
body.addAdditionalTicket( ticket1 );
Ticket ticket2 = new Ticket();
ticket2.setTktVno( 5 );
ticket2.setRealm( "EXAMPLE.COM" );
ticket2.setSName( new PrincipalName( "server", PrincipalNameType.KRB_NT_PRINCIPAL ) );
ticket2.setEncPart(
new EncryptedData( EncryptionType.AES128_CTS_HMAC_SHA1_96, Strings.getBytesUtf8( "abcdef" ) ) );
body.addAdditionalTicket( ticket2 );
// Check the encoding
int length = body.computeLength();
// Check the length
assertEquals( 0x15B, length );
// Check the encoding
ByteBuffer encodedPdu = ByteBuffer.allocate( length );
try
{
encodedPdu = body.encode( encodedPdu );
// Check the length
assertEquals( 0x15B, encodedPdu.limit() );
}
catch ( EncoderException ee )
{
fail();
}
}
/**
 * An empty KDC-REQ-BODY sequence (0x30 0x00) carries none of the
 * mandatory fields, so the decoder must reject it with a DecoderException.
 */
@Test
public void testKdcReqBodyEmpty() throws DecoderException
{
    ByteBuffer buffer = ByteBuffer.allocate( 0x02 );
    buffer.put( new byte[]
        { 0x30, 0x00 } );
    buffer.flip();

    // Container that will receive the decoded PDU
    Asn1Container container = new KdcReqBodyContainer( buffer );

    // Decoding the empty sequence must fail
    Assertions.assertThrows( DecoderException.class,
        () -> Asn1Decoder.decode( buffer, container ) );
}
/**
 * A KDC-REQ-BODY whose [0] kdc-options tag is present but has a
 * zero-length value must be rejected with a DecoderException.
 */
@Test
public void testKdcReqBodyEmptyOptionTag() throws DecoderException
{
    ByteBuffer buffer = ByteBuffer.allocate( 0x04 );
    buffer.put( new byte[]
        {
            0x30, 0x02,
            ( byte ) 0xA0, 0x00
        } );
    buffer.flip();

    // Container that will receive the decoded PDU
    Asn1Container container = new KdcReqBodyContainer( buffer );

    // Decoding the empty [0] tag must fail
    Assertions.assertThrows( DecoderException.class,
        () -> Asn1Decoder.decode( buffer, container ) );
}
/**
 * A KDC-REQ-BODY whose [0] kdc-options tag wraps an empty INTEGER
 * (0x02 0x00) must be rejected with a DecoderException.
 */
@Test
public void testKdcReqBodyEmptyOptionValue() throws DecoderException
{
    ByteBuffer buffer = ByteBuffer.allocate( 0x06 );
    buffer.put( new byte[]
        {
            0x30, 0x04,
            ( byte ) 0xA0, 0x02,
            0x02, 0x00
        } );
    buffer.flip();

    // Container that will receive the decoded PDU
    Asn1Container container = new KdcReqBodyContainer( buffer );

    // Decoding the empty options value must fail
    Assertions.assertThrows( DecoderException.class,
        () -> Asn1Decoder.decode( buffer, container ) );
}
/**
 * Test the decoding of a KDC-REQ-BODY with no options.
 *
 * The PDU below contains every other KDC-REQ-BODY field but omits the
 * [0] kdc-options tag entirely; since kdc-options is mandatory, the
 * decoder must reject the PDU with a DecoderException.
 */
@Test
public void testKdcReqBodyNoOptions() throws DecoderException
{
    ByteBuffer stream = ByteBuffer.allocate( 0x152 );
    stream.put( new byte[]
        {
            0x30, ( byte ) 0x82, 0x01, 0x4E,
            // NOTE: no [0] kdc-options tag here -- that is the point of this test
            // [1] cname, PrincipalName 'client'
            ( byte ) 0xA1, 0x13,
            0x30, 0x11,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x0A,
            ( byte ) 0xA1, 0x0A,
            0x30, 0x08,
            0x1B, 0x06,
            'c', 'l', 'i', 'e', 'n', 't',
            // [2] realm, KerberosString 'EXAMPLE.COM'
            ( byte ) 0xA2, 0x0D,
            0x1B, 0x0B,
            'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
            // [3] sname, PrincipalName 'server'
            ( byte ) 0xA3, 0x13,
            0x30, 0x11,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x0A,
            ( byte ) 0xA1, 0x0A,
            0x30, 0x08,
            0x1B, 0x06,
            's', 'e', 'r', 'v', 'e', 'r',
            // [4] from, KerberosTime (GeneralizedTime)
            ( byte ) 0xA4, 0x11,
            0x18, 0x0F,
            '2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
            // [5] till, KerberosTime
            ( byte ) 0xA5, 0x11,
            0x18, 0x0F,
            '2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
            // [6] rtime, KerberosTime
            ( byte ) 0xA6, 0x11,
            0x18, 0x0F,
            '2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
            // [7] nonce, INTEGER 0x3039 (= 12345)
            ( byte ) 0xA7, 0x04,
            0x02, 0x02,
            0x30, 0x39,
            // [8] etype, SEQUENCE OF INTEGER (6, 0x11, 0x12)
            ( byte ) 0xA8, 0x0B,
            0x30, 0x09,
            0x02, 0x01, 0x06,
            0x02, 0x01, 0x11,
            0x02, 0x01, 0x12,
            // [9] addresses, two HostAddress entries
            ( byte ) 0xA9, 0x2E,
            0x30, 0x2C,
            0x30, 0x14,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x02,
            ( byte ) 0xA1, 0x0D,
            0x04, 0x0B,
            '1', '9', '2', '.', '1', '6', '8', '.', '0', '.', '1',
            0x30, 0x14,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x02,
            ( byte ) 0xA1, 0x0D,
            0x04, 0x0B,
            '1', '9', '2', '.', '1', '6', '8', '.', '0', '.', '2',
            // [10] enc-authorization-data, EncryptedData
            ( byte ) 0xAA, 0x11,
            0x30, 0x0F,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x11,
            ( byte ) 0xA2, 0x08,
            0x04, 0x06,
            'a', 'b', 'c', 'd', 'e', 'f',
            // [11] additional-tickets, SEQUENCE OF Ticket
            ( byte ) 0xAB, ( byte ) 0x81, ( byte ) 0x83,
            0x30, ( byte ) 0x81, ( byte ) 0x80,
            // first Ticket (sname 'client')
            0x61, 0x3E,
            0x30, 0x3C,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x05,
            ( byte ) 0xA1, 0x0D,
            0x1B, 0x0B,
            'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
            ( byte ) 0xA2, 0x13,
            0x30, 0x11,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x01,
            ( byte ) 0xA1, 0x0A,
            0x30, 0x08,
            0x1B, 0x06,
            'c', 'l', 'i', 'e', 'n', 't',
            ( byte ) 0xA3, 0x11,
            0x30, 0x0F,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x11,
            ( byte ) 0xA2, 0x08,
            0x04, 0x06,
            'a', 'b', 'c', 'd', 'e', 'f',
            // second Ticket (sname 'server')
            0x61, 0x3E,
            0x30, 0x3C,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x05,
            ( byte ) 0xA1, 0x0D,
            0x1B, 0x0B,
            'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
            ( byte ) 0xA2, 0x13,
            0x30, 0x11,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x01,
            ( byte ) 0xA1, 0x0A,
            0x30, 0x08,
            0x1B, 0x06,
            's', 'e', 'r', 'v', 'e', 'r',
            ( byte ) 0xA3, 0x11,
            0x30, 0x0F,
            ( byte ) 0xA0, 0x03,
            0x02, 0x01, 0x11,
            ( byte ) 0xA2, 0x08,
            0x04, 0x06,
            'a', 'b', 'c', 'd', 'e', 'f'
        } );
    stream.flip();

    // Allocate a KDC-REQ-BODY Container
    Asn1Container kdcReqBodyContainer = new KdcReqBodyContainer( stream );

    // Decode the KDC-REQ-BODY PDU: must fail because kdc-options is missing
    Assertions.assertThrows( DecoderException.class, () -> {
        Asn1Decoder.decode(stream, kdcReqBodyContainer);
    } );
}
/**
 * Test the decoding of a KdcReqBody message with no optional value
 * ( we only have options, realm, till, nonce and etype ).
 *
 * Decodes the minimal PDU, then re-encodes the resulting KdcReqBody and
 * checks that the length round-trips.
 */
@Test
public void testDecodeKdcReqBodyNoOptionalValue() throws Exception
{
    ByteBuffer stream = ByteBuffer.allocate( 0x40 );
    stream.put( new byte[]
        {
            0x30, ( byte ) 0x3E,
            // [0] kdc-options (BIT STRING)
            ( byte ) 0xA0, 0x07,
            0x03, 0x05,
            0x00, 0x01, 0x04, 0x00, 0x32,
            // [2] realm 'EXAMPLE.COM'
            ( byte ) 0xA2, 0x0D,
            0x1B, 0x0B,
            'E', 'X', 'A', 'M', 'P', 'L', 'E', '.', 'C', 'O', 'M',
            // [5] till (KerberosTime)
            ( byte ) 0xA5, 0x11,
            0x18, 0x0F,
            '2', '0', '1', '0', '1', '1', '1', '0', '1', '5', '4', '5', '2', '5', 'Z',
            // [7] nonce
            ( byte ) 0xA7, 0x04,
            0x02, 0x02,
            0x30, 0x39,
            // [8] etype list
            ( byte ) 0xA8, 0x0B,
            0x30, 0x09,
            0x02, 0x01, 0x06,
            0x02, 0x01, 0x11,
            0x02, 0x01, 0x12
        } );
    stream.flip();

    // Allocate a KdcReqBody Container
    KdcReqBodyContainer kdcReqBodyContainer = new KdcReqBodyContainer( stream );

    // Decode the KdcReqBody PDU
    try
    {
        Asn1Decoder.decode( stream, kdcReqBodyContainer );
    }
    catch ( DecoderException de )
    {
        fail( de.getMessage() );
    }

    KdcReqBody body = kdcReqBodyContainer.getKdcReqBody();
    assertNotNull( body );

    // Check the encoding: re-encode and verify the PDU length round-trips
    ByteBuffer bb = ByteBuffer.allocate( body.computeLength() );

    try
    {
        bb = body.encode( bb );

        // Check the length
        assertEquals( 0x40, bb.limit() );
    }
    catch ( EncoderException ee )
    {
        // Keep the cause in the failure message instead of a bare fail()
        fail( ee.getMessage() );
    }
}
}
| |
/*
* The MIT License (MIT)
* Copyright (c) 2017. Sergio Leonardo Isasmendi
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package leoisasmendi.android.com.suricatepodcast.services;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.appwidget.AppWidgetManager;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.drawable.Drawable;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.session.MediaSessionManager;
import android.os.Binder;
import android.os.IBinder;
import android.os.RemoteException;
import android.support.v4.content.ContextCompat;
import android.support.v4.media.MediaMetadataCompat;
import android.support.v4.media.session.MediaControllerCompat;
import android.support.v4.media.session.MediaSessionCompat;
import android.support.v7.app.NotificationCompat;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyManager;
import android.util.Log;
import android.widget.RemoteViews;
import com.squareup.picasso.Picasso;
import com.squareup.picasso.Target;
import leoisasmendi.android.com.suricatepodcast.MainActivity;
import leoisasmendi.android.com.suricatepodcast.R;
import leoisasmendi.android.com.suricatepodcast.data.PlaylistItem;
import leoisasmendi.android.com.suricatepodcast.provider.DataProvider;
import leoisasmendi.android.com.suricatepodcast.utils.ParserUtils;
import leoisasmendi.android.com.suricatepodcast.utils.PlaybackStatus;
import leoisasmendi.android.com.suricatepodcast.utils.StorageUtil;
import leoisasmendi.android.com.suricatepodcast.widget.PodcastWidgetProvider;
/**
 * Background service that streams podcast audio. It wires a MediaPlayer to
 * a MediaSession, reacts to audio-focus changes and phone calls, exposes
 * transport-control broadcast actions, and keeps a playback notification
 * and home-screen widget in sync with the player state.
 */
public class MediaPlayerService extends Service implements MediaPlayer.OnCompletionListener,
        MediaPlayer.OnPreparedListener, MediaPlayer.OnErrorListener, MediaPlayer.OnSeekCompleteListener,
        MediaPlayer.OnInfoListener, MediaPlayer.OnBufferingUpdateListener,
        AudioManager.OnAudioFocusChangeListener {

    private final String TAG = getClass().getSimpleName();

    // Binder given to clients
    private final IBinder iBinder = new LocalBinder();

    private MediaPlayer mediaPlayer;
    // Used to pause/resume MediaPlayer (position in ms saved by pauseMedia())
    private int resumePosition;
    private AudioManager audioManager;

    // Handle incoming phone calls
    private boolean ongoingCall = false;
    private PhoneStateListener phoneStateListener;
    private TelephonyManager telephonyManager;

    // List of available Audio files; audioIndex is the cursor position of the active track
    private int audioIndex = -1;
    private PlaylistItem activeAudio; //an object of the currently playing audio
    private Cursor mCursor;

    private BroadcastReceiver playNewAudio = getNewAudioBroadcastReceiver();

    //Becoming noisy (headphone removed)
    private BroadcastReceiver becomingNoisyReceiver = getBecomingNoisyReceiver();

    // Transport-control actions dispatched through PendingIntents on the notification
    public static final String ACTION_PLAY = "leoisasmendi.android.com.suricatepodcast.ACTION_PLAY";
    public static final String ACTION_PAUSE = "leoisasmendi.android.com.suricatepodcast.ACTION_PAUSE";
    public static final String ACTION_PREVIOUS = "leoisasmendi.android.com.suricatepodcast.ACTION_PREVIOUS";
    public static final String ACTION_NEXT = "leoisasmendi.android.com.suricatepodcast.ACTION_NEXT";
    public static final String ACTION_STOP = "leoisasmendi.android.com.suricatepodcast.ACTION_STOP";

    // Status values broadcast to the UI via publishStatus()
    public static final String STATUS_ERROR = "leoisasmendi.android.com.suricatepodcast.STATUS_ERROR";
    public static final String STATUS_DONE = "leoisasmendi.android.com.suricatepodcast.STATUS_DONE";
    public static final String STATUS_FETCHING = "leoisasmendi.android.com.suricatepodcast.STATUS_FETCHING";
    public static final String STATUS_PLAYING = "leoisasmendi.android.com.suricatepodcast.STATUS_PLAYING";
    public static final String STATUS_STOPED = "leoisasmendi.android.com.suricatepodcast.STATUS_STOPED";
    public static final String STATUS_PAUSED = "leoisasmendi.android.com.suricatepodcast.STATUS_PAUSED";
    public static final String MEDIA_UPDATED = "leoisasmendi.android.com.suricatepodcast.MEDIA_UPDATED";
    public static final String NOTIFICATION = "leoisasmendi.android.com.suricatepodcast";
    public static final String STATUS = "status";

    //MediaSession
    private MediaSessionManager mediaSessionManager;
    private MediaSessionCompat mediaSession;
    private MediaControllerCompat.TransportControls transportControls;

    //AudioPlayer notification ID
    private static final int NOTIFICATION_ID = 101;

    public MediaPlayerService() {
    }
/**
 * One-time service setup: load the playlist cursor, start listening for
 * phone-call state, headphone-unplug events and play-new-audio broadcasts.
 */
@Override
public void onCreate() {
    super.onCreate();
    // Load the playlist backing cursor. A null selection means "no WHERE
    // clause"; the previous empty-string selection can produce invalid SQL
    // in providers that concatenate the selection into the query.
    mCursor = getContentResolver().query(DataProvider.CONTENT_URI,
            null,
            null,
            null,
            null);
    // Manage incoming phone calls during playback:
    // pause the MediaPlayer on incoming call, resume on hangup.
    callStateListener();
    // ACTION_AUDIO_BECOMING_NOISY -- change in audio outputs -- BroadcastReceiver
    registerBecomingNoisyReceiver();
    // Listen for new Audio to play -- BroadcastReceiver
    register_playNewAudio();
}
/**
 * Creates and configures a fresh MediaPlayer, then starts asynchronous
 * preparation of the active track. Publishes STATUS_FETCHING while
 * buffering; on failure publishes STATUS_ERROR and stops the service.
 */
private void initMediaPlayer() {
    mediaPlayer = new MediaPlayer();
    // Set up MediaPlayer event listeners
    mediaPlayer.setOnCompletionListener(this);
    mediaPlayer.setOnErrorListener(this);
    mediaPlayer.setOnPreparedListener(this);
    mediaPlayer.setOnBufferingUpdateListener(this);
    mediaPlayer.setOnSeekCompleteListener(this);
    mediaPlayer.setOnInfoListener(this);
    // Reset so that the MediaPlayer is not pointing to another data source
    mediaPlayer.reset();
    mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
    try {
        publishStatus(STATUS_FETCHING);
        // Set the data source to the mediaFile location
        mediaPlayer.setDataSource(activeAudio.getAudio());
        mediaPlayer.prepareAsync();
    } catch (Exception e) {
        publishStatus(STATUS_ERROR);
        stopSelf();
        // getString() resolves the resource text; concatenating R.string.*
        // directly (as before) logs the raw int resource id instead.
        // Log.e with the throwable replaces printStackTrace().
        Log.e(TAG, "initMediaPlayer: " + getString(R.string.media_player_error_1), e);
    }
}
/**
 * Creates the MediaSession and hooks its transport controls to this
 * service's playback methods. Idempotent: does nothing if the session
 * manager was already obtained.
 *
 * @throws RemoteException declared for callers; session setup is local.
 */
private void initMediaSession() throws RemoteException {
    if (mediaSessionManager != null) return; //mediaSessionManager exists

    mediaSessionManager = (MediaSessionManager) getSystemService(Context.MEDIA_SESSION_SERVICE);
    // Create a new MediaSession
    mediaSession = new MediaSessionCompat(getApplicationContext(), "AudioPlayer");
    //Get MediaSessions transport controls
    transportControls = mediaSession.getController().getTransportControls();
    //set MediaSession -> ready to receive media commands
    mediaSession.setActive(true);
    //indicate that the MediaSession handles transport control commands
    // through its MediaSessionCompat.Callback.
    mediaSession.setFlags(MediaSessionCompat.FLAG_HANDLES_TRANSPORT_CONTROLS);

    //Set mediaSession's MetaData
    updateMetaData();

    // Attach Callback to receive MediaSession updates.
    // Each callback mirrors the transport action onto the MediaPlayer and
    // keeps the broadcast status and notification in sync.
    mediaSession.setCallback(new MediaSessionCompat.Callback() {
        // Implement callbacks
        @Override
        public void onPlay() {
            super.onPlay();
            resumeMedia();
            publishStatus(STATUS_PLAYING);
            buildNotification(PlaybackStatus.PLAYING);
        }

        @Override
        public void onPause() {
            super.onPause();
            pauseMedia();
            publishStatus(STATUS_PAUSED);
            buildNotification(PlaybackStatus.PAUSED);
        }

        @Override
        public void onSkipToNext() {
            super.onSkipToNext();
            skipToNext();
            updateMetaData();
            buildNotification(PlaybackStatus.PLAYING);
        }

        @Override
        public void onSkipToPrevious() {
            super.onSkipToPrevious();
            skipToPrevious();
            updateMetaData();
            buildNotification(PlaybackStatus.PLAYING);
        }

        @Override
        public void onStop() {
            super.onStop();
            publishStatus(STATUS_STOPED);
            removeNotification();
            //Stop the service
            stopSelf();
        }
    });
}
/**
 * Broadcasts the given playback status under the NOTIFICATION action.
 * MEDIA_UPDATED broadcasts additionally carry the active track's title
 * and duration as extras.
 */
private void publishStatus(String status) {
    final Intent broadcast = new Intent(NOTIFICATION);
    final boolean trackChanged = status.equals(MEDIA_UPDATED);
    if (trackChanged) {
        broadcast.putExtra("EXTRA_TITLE", activeAudio.getTitle());
        broadcast.putExtra("EXTRA_DURATION", activeAudio.getDuration());
    }
    broadcast.putExtra(STATUS, status);
    sendBroadcast(broadcast);
}
/**
 * Pushes the active track's title (and a placeholder album art) into the
 * MediaSession metadata. No-op when no track is loaded.
 */
private void updateMetaData() {
    if (activeAudio == null) {
        return;
    }
    // Placeholder art until per-episode artwork is wired up.
    final Bitmap albumArt = BitmapFactory.decodeResource(getResources(),
            R.drawable.ic_default_poster);
    final MediaMetadataCompat metadata = new MediaMetadataCompat.Builder()
            .putBitmap(MediaMetadataCompat.METADATA_KEY_ALBUM_ART, albumArt)
            .putString(MediaMetadataCompat.METADATA_KEY_TITLE, activeAudio.getTitle())
            .build();
    mediaSession.setMetadata(metadata);
}
/**
 * Builds (or refreshes) the media-style playback notification and the
 * home-screen widget for the given playback state. The middle action
 * toggles between play and pause depending on {@code playbackStatus}.
 */
private void buildNotification(PlaybackStatus playbackStatus) {
    //TODO: REFACTOR updateWidget method
    updateWidgets(playbackStatus);
    int notificationAction = R.drawable.ic_media_control_pause;//needs to be initialized
    PendingIntent play_pauseAction = null;

    //Build a new notification according to the current state of the MediaPlayer
    if (playbackStatus == PlaybackStatus.PLAYING) {
        notificationAction = R.drawable.ic_media_control_pause;
        //create the pause action
        play_pauseAction = playbackAction(1);
    } else if (playbackStatus == PlaybackStatus.PAUSED) {
        notificationAction = R.drawable.ic_media_control_play;
        //create the play action
        play_pauseAction = playbackAction(0);
    }

    //TODO: replace this with my icon
    Bitmap largeIcon = BitmapFactory.decodeResource(getResources(),
            R.drawable.ic_default_poster); //replace with your own image

    // Create a new Notification.
    // NOTE(review): the middle action's label is always "pause" even when it
    // shows the play icon -- confirm whether the label should follow the state.
    NotificationCompat.Builder notificationBuilder = (NotificationCompat.Builder) new NotificationCompat.Builder(this)
            .setShowWhen(false)
            // Set the Notification style
            .setStyle(new NotificationCompat.MediaStyle()
                    // Attach our MediaSession token
                    .setMediaSession(mediaSession.getSessionToken())
                    // Show our playback controls in the compact notification view.
                    .setShowActionsInCompactView(0, 1, 2))
            // Set the Notification color
            .setColor(ContextCompat.getColor(getBaseContext(), R.color.colorPrimaryDark))
            // Set the large and small icons
            .setLargeIcon(largeIcon)
            .setSmallIcon(R.drawable.ic_headphones)
            // Set Notification content information
            .setContentText(activeAudio.getTitle())
            // Add playback actions: previous / play-pause toggle / next
            .addAction(R.drawable.ic_media_control_prev, "previous", playbackAction(3))
            .addAction(notificationAction, "pause", play_pauseAction)
            .addAction(R.drawable.ic_media_control_next, "next", playbackAction(2));

    ((NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE)).notify(NOTIFICATION_ID, notificationBuilder.build());
}
/**
 * Mirrors the current playback state onto the home-screen widget:
 * play/pause icon, track title and duration, and the episode artwork
 * loaded via Picasso.
 */
private void updateWidgets(PlaybackStatus playbackStatus) {
    final RemoteViews view = new RemoteViews(getPackageName(), R.layout.podcast_widget_player);

    if (playbackStatus == PlaybackStatus.PLAYING) {
        view.setImageViewResource(R.id.widget_play, R.drawable.media_player_pause_24x24);
    } else if (playbackStatus == PlaybackStatus.PAUSED) {
        view.setImageViewResource(R.id.widget_play, R.drawable.media_player_play_24x24);
    }

    view.setTextViewText(R.id.widget_title, activeAudio.getTitle());
    view.setTextViewText(R.id.widget_length, activeAudio.getDuration());

    Picasso.with(getBaseContext()).setLoggingEnabled(true);

    // NOTE(review): Picasso keeps only a weak reference to Target, and this
    // one is method-local -- it may be garbage-collected before the bitmap
    // arrives, leaving the widget on the placeholder image. Consider holding
    // the Target in a field. Verify against Picasso's Target docs.
    Target target = new Target() {
        @Override
        public void onBitmapLoaded(Bitmap bitmap, Picasso.LoadedFrom from) {
            view.setImageViewBitmap(R.id.widget_thumbail, bitmap);
        }

        @Override
        public void onBitmapFailed(Drawable errorDrawable) {
            view.setImageViewResource(R.id.widget_thumbail, R.drawable.picture);
        }

        @Override
        public void onPrepareLoad(Drawable placeHolderDrawable) {
            view.setImageViewResource(R.id.widget_thumbail, R.drawable.picture);
        }
    };

    Picasso.with(getBaseContext())
            .load(activeAudio.getPoster())
            .into(target);

    // Push update for this widget to the home screen
    ComponentName thisWidget = new ComponentName(this, PodcastWidgetProvider.class);
    AppWidgetManager manager = AppWidgetManager.getInstance(this);
    manager.updateAppWidget(thisWidget, view);
}
/** Cancels the player's status-bar notification. */
private void removeNotification() {
    final NotificationManager manager =
            (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
    manager.cancel(NOTIFICATION_ID);
}
/**
 * Builds a PendingIntent targeting this service for one of the playback
 * controls: 0 = play, 1 = pause, 2 = next, 3 = previous.
 *
 * @return the PendingIntent, or null for an unknown action number.
 */
private PendingIntent playbackAction(int actionNumber) {
    final Intent serviceIntent = new Intent(this, MediaPlayerService.class);
    final String actionName;
    switch (actionNumber) {
        case 0:
            actionName = ACTION_PLAY;
            break;
        case 1:
            actionName = ACTION_PAUSE;
            break;
        case 2:
            actionName = ACTION_NEXT;
            break;
        case 3:
            actionName = ACTION_PREVIOUS;
            break;
        default:
            return null;
    }
    serviceIntent.setAction(actionName);
    // The action number doubles as the request code so the four intents stay distinct.
    return PendingIntent.getService(this, actionNumber, serviceIntent, 0);
}
/**
 * Routes a transport-control action carried by the notification's
 * PendingIntent into the MediaSession's transport controls. Intents
 * without an action are ignored.
 */
private void handleIncomingActions(Intent playbackAction) {
    if (playbackAction == null) {
        return;
    }
    final String action = playbackAction.getAction();
    if (action == null) {
        return;
    }
    if (ACTION_PLAY.equalsIgnoreCase(action)) {
        transportControls.play();
    } else if (ACTION_PAUSE.equalsIgnoreCase(action)) {
        transportControls.pause();
    } else if (ACTION_NEXT.equalsIgnoreCase(action)) {
        transportControls.skipToNext();
    } else if (ACTION_PREVIOUS.equalsIgnoreCase(action)) {
        transportControls.skipToPrevious();
    } else if (ACTION_STOP.equalsIgnoreCase(action)) {
        transportControls.stop();
    }
}
/**
 * Loads the playlist row at {@code audioIndex} into {@code activeAudio}.
 * Stops the service when there is nothing valid to play.
 */
private void loadActiveAudio() {
    // The provider query in onCreate() may return a null cursor; guard it
    // before dereferencing (previously this could NPE).
    if (mCursor != null && audioIndex != -1 && audioIndex < mCursor.getCount()) {
        //index is in a valid range
        mCursor.moveToPosition(audioIndex);
        activeAudio = ParserUtils.buildPlaylistItem(mCursor);
    } else {
        stopSelf();
    }
}
/**
 * Hands bound clients the local binder so they can reach this service
 * instance directly (in-process binding only).
 */
@Override
public IBinder onBind(Intent intent) {
    return iBinder;
}
/**
 * Entry point when an activity requests playback: loads the requested
 * track index from SharedPreferences, acquires audio focus, initializes
 * the MediaSession/MediaPlayer on first start, and routes any transport
 * action carried by the intent.
 */
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    try {
        //Load data from SharedPreferences
        StorageUtil storage = new StorageUtil(getApplicationContext());
        audioIndex = storage.loadAudioIndex();
        // getString() resolves the resource text; concatenating R.string.*
        // directly (as before) logs the raw int resource id instead.
        Log.d(TAG, "onStartCommand: " + getString(R.string.media_player_connecting));
        loadActiveAudio();
    } catch (NullPointerException e) {
        Log.d(TAG, "onStartCommand: " + getString(R.string.media_player_error_1));
        stopSelf();
    }

    //Request audio focus; without it we must not start playback
    if (!requestAudioFocus()) {
        //Could not gain focus
        stopSelf();
    }

    // First start: create the session and the player, then show the notification.
    if (mediaSessionManager == null) {
        try {
            initMediaSession();
            initMediaPlayer();
        } catch (RemoteException e) {
            Log.e(TAG, "onStartCommand: could not init media session", e);
            stopSelf();
        }
        buildNotification(PlaybackStatus.PLAYING);
    }

    //Handle Intent action from MediaSession.TransportControls
    handleIncomingActions(intent);
    return super.onStartCommand(intent, flags, startId);
}
/**
 * Tears everything down: stops and releases the player, abandons audio
 * focus, detaches the phone-state listener, removes the notification,
 * unregisters receivers, clears the cached playlist and closes the cursor.
 */
@Override
public void onDestroy() {
    super.onDestroy();
    if (mediaPlayer != null) {
        stopMedia();
        mediaPlayer.release();
    }
    removeAudioFocus();
    //Disable the PhoneStateListener
    if (phoneStateListener != null) {
        telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE);
    }

    removeNotification();

    //unregister BroadcastReceivers
    unregisterReceiver(becomingNoisyReceiver);
    unregisterReceiver(playNewAudio);

    //clear cached playlist
    new StorageUtil(getApplicationContext()).clearCachedAudioPlaylist();

    // The provider query in onCreate() may have returned null; guard before closing.
    if (mCursor != null) {
        mCursor.close();
    }
}
/**
 * Buffering progress callback for streamed media. Intentionally unused.
 */
@Override
public void onBufferingUpdate(MediaPlayer mediaPlayer, int percent) {
    //Invoked indicating buffering status of
    //a media resource being streamed over the network.
}
/**
 * Called when the current track finishes: stop playback and shut the
 * service down (no auto-advance to the next track here).
 */
@Override
public void onCompletion(MediaPlayer mediaPlayer) {
    //Invoked when playback of a media source has completed.
    stopMedia();
    //stop the service
    stopSelf();
}
/**
 * MediaPlayer error callback: logs the error category and lets the
 * framework fall through to onCompletion by returning false.
 */
@Override
public boolean onError(MediaPlayer mp, int what, int extra) {
    // getString() resolves the resource text; concatenating R.string.*
    // directly (as before) logs the raw int resource id instead.
    Log.d(TAG, "onError: " + getString(R.string.media_player_error_2));
    switch (what) {
        case MediaPlayer.MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK:
            Log.d("MediaPlayer Error", "MEDIA ERROR NOT VALID FOR PROGRESSIVE PLAYBACK " + extra);
            break;
        case MediaPlayer.MEDIA_ERROR_SERVER_DIED:
            Log.d("MediaPlayer Error", "MEDIA ERROR SERVER DIED " + extra);
            break;
        case MediaPlayer.MEDIA_ERROR_UNKNOWN:
            Log.d("MediaPlayer Error", "MEDIA ERROR UNKNOWN " + extra);
            break;
    }
    // false = not handled here, so the framework will call onCompletion.
    return false;
}
/**
 * Informational callback from the MediaPlayer. Not handled (returns false).
 */
@Override
public boolean onInfo(MediaPlayer mp, int what, int extra) {
    //Invoked to communicate some info.
    return false;
}
/**
 * Called when async preparation finishes: announce STATUS_DONE and start
 * playback.
 */
@Override
public void onPrepared(MediaPlayer mp) {
    // getString() resolves the resource text; concatenating R.string.*
    // directly (as before) logs the raw int resource id instead.
    Log.d(TAG, "onPrepared: " + getString(R.string.media_player_successful_fetch));
    publishStatus(STATUS_DONE);
    playMedia();
}
/**
 * Seek-completion callback. Intentionally unused.
 */
@Override
public void onSeekComplete(MediaPlayer mp) {
    //Invoked indicating the completion of a seek operation.
}
/**
 * Reacts to system audio-focus changes: resume at full volume on GAIN,
 * stop and release on LOSS, pause on transient loss, duck on
 * LOSS_TRANSIENT_CAN_DUCK.
 */
@Override
public void onAudioFocusChange(int focusState) {
    switch (focusState) {
        case AudioManager.AUDIOFOCUS_GAIN:
            // Regained focus: resume playback at full volume.
            if (mediaPlayer == null) initMediaPlayer();
            else if (!mediaPlayer.isPlaying()) mediaPlayer.start();
            mediaPlayer.setVolume(1.0f, 1.0f);
            break;
        case AudioManager.AUDIOFOCUS_LOSS:
            // Lost focus for an unbounded amount of time: stop playback and
            // release the player. Guard against a null player (previously NPE'd
            // when focus was lost before playback ever started).
            if (mediaPlayer != null) {
                if (mediaPlayer.isPlaying()) mediaPlayer.stop();
                mediaPlayer.release();
                mediaPlayer = null;
            }
            break;
        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
            // Lost focus for a short time: pause but keep the player,
            // playback is likely to resume.
            if (mediaPlayer != null && mediaPlayer.isPlaying()) mediaPlayer.pause();
            break;
        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
            // Lost focus for a short time, but it's ok to keep playing
            // at an attenuated level ("ducking").
            if (mediaPlayer != null && mediaPlayer.isPlaying()) mediaPlayer.setVolume(0.1f, 0.1f);
            break;
    }
}
/**
 * Requests the audio focus required before playback may start.
 *
 * @return true when the system granted focus.
 */
private boolean requestAudioFocus() {
    audioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
    int result = audioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN);
    // Direct comparison replaces the if/return-true/return-false anti-idiom.
    return result == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
}
/**
 * Abandons audio focus if it was ever requested.
 *
 * @return true when focus was released, or was never acquired.
 */
private boolean removeAudioFocus() {
    // onDestroy() can run before onStartCommand() ever requested focus,
    // leaving audioManager null (previously this NPE'd).
    return audioManager == null
            || AudioManager.AUDIOFOCUS_REQUEST_GRANTED == audioManager.abandonAudioFocus(this);
}
/**
 * Builds the receiver for MainActivity.Broadcast_PLAY_NEW_AUDIO: reloads
 * the requested track index and restarts playback with the new track.
 */
private BroadcastReceiver getNewAudioBroadcastReceiver() {
    return new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            //Get the new media index form SharedPreferences
            audioIndex = new StorageUtil(getApplicationContext()).loadAudioIndex();
            loadActiveAudio();

            //A PLAY_NEW_AUDIO action received:
            //reset mediaPlayer to play the new Audio.
            stopMedia();
            // The player may not exist yet if no track was ever started;
            // guard the reset (previously this NPE'd).
            if (mediaPlayer != null) {
                mediaPlayer.reset();
            }
            initMediaPlayer();
            updateMetaData();
            buildNotification(PlaybackStatus.PLAYING);
        }
    };
}
/** Registers the play-new-audio receiver for broadcasts from MainActivity. */
private void register_playNewAudio() {
    registerReceiver(playNewAudio, new IntentFilter(MainActivity.Broadcast_PLAY_NEW_AUDIO));
}
/**
 * Installs a PhoneStateListener that pauses playback while a call is
 * ringing or active and resumes it once the phone returns to idle
 * (only if we were the ones who paused -- tracked by {@code ongoingCall}).
 */
private void callStateListener() {
    // Get the telephony manager
    telephonyManager = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
    //Starting listening for PhoneState changes
    phoneStateListener = new PhoneStateListener() {
        @Override
        public void onCallStateChanged(int state, String incomingNumber) {
            switch (state) {
                //if at least one call exists or the phone is ringing
                //pause the MediaPlayer
                case TelephonyManager.CALL_STATE_OFFHOOK:
                case TelephonyManager.CALL_STATE_RINGING:
                    if (mediaPlayer != null) {
                        pauseMedia();
                        // Remember that the pause was call-induced so we resume later.
                        ongoingCall = true;
                    }
                    break;
                case TelephonyManager.CALL_STATE_IDLE:
                    // Phone idle. Resume only if a call caused the pause.
                    if (mediaPlayer != null) {
                        if (ongoingCall) {
                            ongoingCall = false;
                            resumeMedia();
                        }
                    }
                    break;
            }
        }
    };
    // Register the listener with the telephony manager
    // Listen for changes to the device call state.
    telephonyManager.listen(phoneStateListener,
            PhoneStateListener.LISTEN_CALL_STATE);
}
/**
 * Receiver for ACTION_AUDIO_BECOMING_NOISY (e.g. headphones unplugged):
 * pauses playback rather than letting it blast through the speaker.
 */
private BroadcastReceiver getBecomingNoisyReceiver() {
    return new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            pauseMedia();
            buildNotification(PlaybackStatus.PAUSED);
        }
    };
}
/** Registers the becoming-noisy receiver (call after gaining audio focus). */
private void registerBecomingNoisyReceiver() {
    registerReceiver(becomingNoisyReceiver,
            new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
}
//MEDIA PLAYER BASIC CONTROLS

/**
 * Starts playback if not already playing and announces the track change
 * followed by the playing status.
 */
private void playMedia() {
    if (!mediaPlayer.isPlaying()) {
        // getString() resolves the resource text; concatenating R.string.*
        // directly (as before) logs the raw int resource id instead.
        Log.d(TAG, "playMedia: " + getString(R.string.media_player_playing));
        mediaPlayer.start();
        publishStatus(MEDIA_UPDATED);
        publishStatus(STATUS_PLAYING);
    }
}
/** Stops playback and broadcasts STATUS_STOPED; safe to call with no player. */
private void stopMedia() {
    if (mediaPlayer == null) {
        return;
    }
    if (mediaPlayer.isPlaying()) {
        mediaPlayer.stop();
        publishStatus(STATUS_STOPED);
    }
}
/**
 * Pauses playback and remembers the current position so resumeMedia()
 * can pick up where we left off.
 */
private void pauseMedia() {
    // Null-guard added for consistency with stopMedia(): the call-state
    // listener can fire before the player exists.
    if (mediaPlayer != null && mediaPlayer.isPlaying()) {
        mediaPlayer.pause();
        resumePosition = mediaPlayer.getCurrentPosition();
        publishStatus(STATUS_PAUSED);
    }
}
/**
 * Resumes playback from the position saved by pauseMedia().
 */
private void resumeMedia() {
    // Null-guard added for consistency with stopMedia(): transport callbacks
    // can fire before the player exists.
    if (mediaPlayer != null && !mediaPlayer.isPlaying()) {
        mediaPlayer.seekTo(resumePosition);
        mediaPlayer.start();
        publishStatus(STATUS_PLAYING);
    }
}
/**
 * Advances to the next playlist entry (wrapping to the first after the
 * last), persists the new index, and restarts playback with it.
 */
private void skipToNext() {
    if (audioIndex == mCursor.getCount() - 1) {
        //if last in playlist: wrap to the start
        audioIndex = 0;
    } else {
        //get next in playlist (was "audioIndex = ++audioIndex", a redundant self-assignment)
        audioIndex++;
    }
    loadActiveAudio();

    //Update stored index
    new StorageUtil(getApplicationContext()).storeAudioIndex(audioIndex);

    stopMedia();
    //reset mediaPlayer before pointing it at the new source
    mediaPlayer.reset();
    initMediaPlayer();
}
/**
 * Moves to the previous playlist entry (wrapping to the last from the
 * first), persists the new index, and restarts playback with it.
 */
private void skipToPrevious() {
    if (audioIndex == 0) {
        // First entry: wrap to the end of the playlist.
        audioIndex = mCursor.getCount() - 1;
    } else {
        // (was "audioIndex = --audioIndex", a redundant self-assignment)
        audioIndex--;
    }
    loadActiveAudio();

    //Update stored index
    new StorageUtil(getApplicationContext()).storeAudioIndex(audioIndex);

    stopMedia();
    //reset mediaPlayer before pointing it at the new source
    mediaPlayer.reset();
    initMediaPlayer();
}
// INNER CLASS

/**
 * Binder handed to in-process clients via onBind(); exposes the service
 * instance directly.
 */
public class LocalBinder extends Binder {
    public MediaPlayerService getService() {
        return MediaPlayerService.this;
    }
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.